[ 451.403651] env[61978]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61978) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 451.403982] env[61978]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61978) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 451.404165] env[61978]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61978) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 451.404425] env[61978]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 451.503169] env[61978]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61978) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 451.513671] env[61978]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=61978) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 452.114024] env[61978]: INFO nova.virt.driver [None req-ec585abf-1398-4e54-af1e-e44059245c44 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 452.184531] env[61978]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 452.184694] env[61978]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 452.184793] env[61978]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61978) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 455.345212] env[61978]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-8133c471-6d00-4d5b-b996-05908b86979b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 455.360474] env[61978]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61978) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 455.360616] env[61978]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-04a9cd8e-ff22-41bb-8695-168d121ed88c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 455.385253] env[61978]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 505e4.
[ 455.385414] env[61978]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.201s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 455.385886] env[61978]: INFO nova.virt.vmwareapi.driver [None req-ec585abf-1398-4e54-af1e-e44059245c44 None None] VMware vCenter version: 7.0.3
[ 455.389210] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35952b1c-5628-486a-973d-cdd57659f08a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 455.405890] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072c48d8-4609-4651-ab31-46e87751db9c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 455.411671] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700ffd62-dfa7-4706-8a52-4acf69fc8768 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 455.418117] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02cbec87-c12c-488f-b90d-e7db1fc40ef4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 455.430825] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5d056b-f5af-4978-9899-82e93531db03 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 455.436559] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f27d8c25-4580-4db1-b144-8e30f34f17fc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 455.465861] env[61978]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-de580669-50b1-4c91-a6ad-997da9eecd6a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 455.470872] env[61978]: DEBUG nova.virt.vmwareapi.driver [None req-ec585abf-1398-4e54-af1e-e44059245c44 None None] Extension org.openstack.compute already exists. {{(pid=61978) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:226}}
[ 455.473515] env[61978]: INFO nova.compute.provider_config [None req-ec585abf-1398-4e54-af1e-e44059245c44 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 455.977212] env[61978]: DEBUG nova.context [None req-ec585abf-1398-4e54-af1e-e44059245c44 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),55fa4c11-3ed2-49f8-ad14-0fde1fb442af(cell1) {{(pid=61978) load_cells /opt/stack/nova/nova/context.py:464}}
[ 455.979316] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 455.979542] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 455.980288] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 455.980747] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] Acquiring lock "55fa4c11-3ed2-49f8-ad14-0fde1fb442af" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 455.980935] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] Lock "55fa4c11-3ed2-49f8-ad14-0fde1fb442af" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 455.981970] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] Lock "55fa4c11-3ed2-49f8-ad14-0fde1fb442af" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 456.002957] env[61978]: INFO dbcounter [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] Registered counter for database nova_cell0
[ 456.010982] env[61978]: INFO dbcounter [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] Registered counter for database nova_cell1
[ 456.014201] env[61978]: DEBUG oslo_db.sqlalchemy.engines [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61978) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 456.014539] env[61978]: DEBUG oslo_db.sqlalchemy.engines [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61978) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 456.019232] env[61978]: ERROR nova.db.main.api [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 456.019232] env[61978]: result = function(*args, **kwargs)
[ 456.019232] env[61978]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 456.019232] env[61978]: return func(*args, **kwargs)
[ 456.019232] env[61978]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 456.019232] env[61978]: result = fn(*args, **kwargs)
[ 456.019232] env[61978]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 456.019232] env[61978]: return f(*args, **kwargs)
[ 456.019232] env[61978]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 456.019232] env[61978]: return db.service_get_minimum_version(context, binaries)
[ 456.019232] env[61978]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 456.019232] env[61978]: _check_db_access()
[ 456.019232] env[61978]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 456.019232] env[61978]: stacktrace = ''.join(traceback.format_stack())
[ 456.019232] env[61978]:
[ 456.020058] env[61978]: ERROR nova.db.main.api [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 456.020058] env[61978]: result = function(*args, **kwargs)
[ 456.020058] env[61978]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 456.020058] env[61978]: return func(*args, **kwargs)
[ 456.020058] env[61978]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 456.020058] env[61978]: result = fn(*args, **kwargs)
[ 456.020058] env[61978]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 456.020058] env[61978]: return f(*args, **kwargs)
[ 456.020058] env[61978]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 456.020058] env[61978]: return db.service_get_minimum_version(context, binaries)
[ 456.020058] env[61978]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 456.020058] env[61978]: _check_db_access()
[ 456.020058] env[61978]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 456.020058] env[61978]: stacktrace = ''.join(traceback.format_stack())
[ 456.020058] env[61978]:
[ 456.020449] env[61978]: WARNING nova.objects.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 456.020569] env[61978]: WARNING nova.objects.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] Failed to get minimum service version for cell 55fa4c11-3ed2-49f8-ad14-0fde1fb442af
[ 456.021022] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] Acquiring lock "singleton_lock" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 456.021194] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] Acquired lock "singleton_lock" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [
456.021437] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] Releasing lock "singleton_lock" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 456.021756] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] Full set of CONF: {{(pid=61978) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 456.021897] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ******************************************************************************** {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 456.022033] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] Configuration options gathered from: {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 456.022170] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}} [ 456.022361] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 456.022489] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ================================================================================ {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}} [ 456.022698] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] allow_resize_to_same_host = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.022896] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] arq_binding_timeout = 300 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.023050] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] backdoor_port = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.023185] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] backdoor_socket = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.023356] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] block_device_allocate_retries = 60 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.023519] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] block_device_allocate_retries_interval = 3 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.023688] env[61978]: DEBUG 
oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cert = self.pem {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.023853] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.024033] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] compute_monitors = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.024207] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] config_dir = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.024381] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] config_drive_format = iso9660 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.024514] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.024680] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] config_source = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.024847] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] console_host = devstack {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.025023] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] control_exchange = nova {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.025186] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cpu_allocation_ratio = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.025345] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] daemon = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.025513] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] debug = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.025670] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] default_access_ip_network_name = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.025838] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] default_availability_zone = nova {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.025995] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] default_ephemeral_format = 
None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.026169] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] default_green_pool_size = 1000 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.026407] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.026569] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] default_schedule_zone = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.026728] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] disk_allocation_ratio = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.026890] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] enable_new_services = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.027083] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] enabled_apis = ['osapi_compute'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.027256] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] enabled_ssl_apis = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.027413] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] flat_injected = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.027569] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] force_config_drive = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.027724] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] force_raw_images = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.027891] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] graceful_shutdown_timeout = 5 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.028062] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] heal_instance_info_cache_interval = 60 {{(pid=61978) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.028277] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] host = cpu-1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.028453] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.028616] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] initial_disk_allocation_ratio = 1.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.028780] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] initial_ram_allocation_ratio = 1.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.029010] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.029251] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] instance_build_timeout = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.029428] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] instance_delete_interval = 300 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.029600] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] instance_format = [instance: %(uuid)s] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.029805] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] instance_name_template = instance-%08x {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.029975] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] instance_usage_audit = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.030167] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] instance_usage_audit_period = month {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.030336] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.030503] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] instances_path = /opt/stack/data/nova/instances {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.030668] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] internal_service_availability_zone = internal {{(pid=61978) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.030847] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] key = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.031025] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] live_migration_retry_count = 30 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.031196] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] log_color = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.031362] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] log_config_append = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.031527] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.031686] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] log_dir = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.031843] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] log_file = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.031971] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] log_options = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.032154] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] log_rotate_interval = 1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.032327] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] log_rotate_interval_type = days {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.032494] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] log_rotation_type = none {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.032622] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.032748] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.032915] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.033091] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.033223] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.033386] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] long_rpc_timeout = 1800 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.033545] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] max_concurrent_builds = 10 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.033703] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] max_concurrent_live_migrations = 1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.033862] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] max_concurrent_snapshots = 5 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.034030] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] max_local_block_devices = 3 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.034193] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] max_logfile_count = 30 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.034352] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] max_logfile_size_mb = 200 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.034510] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] maximum_instance_delete_attempts = 5 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.034680] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] metadata_listen = 0.0.0.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.034854] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] metadata_listen_port = 8775 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.035048] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] metadata_workers = 2 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.035217] env[61978]: DEBUG oslo_service.service 
[None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] migrate_max_retries = -1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.035386] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] mkisofs_cmd = genisoimage {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.035593] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] my_block_storage_ip = 10.180.1.21 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.035728] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] my_ip = 10.180.1.21 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.035895] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] network_allocate_retries = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.036083] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.036255] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] osapi_compute_listen = 0.0.0.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.036416] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] osapi_compute_listen_port = 8774 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.036584] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] osapi_compute_unique_server_name_scope = {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.036752] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] osapi_compute_workers = 2 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.036918] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] password_length = 12 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.037089] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] periodic_enable = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.037253] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] periodic_fuzzy_delay = 60 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.037421] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] pointer_model = usbtablet {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.037589] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] preallocate_images = none {{(pid=61978) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.037747] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] publish_errors = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.037878] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] pybasedir = /opt/stack/nova {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.038048] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ram_allocation_ratio = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.038213] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] rate_limit_burst = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.038380] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] rate_limit_except_level = CRITICAL {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.038540] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] rate_limit_interval = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.038702] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] reboot_timeout = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.038862] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] reclaim_instance_interval = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.039032] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] record = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.039203] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] reimage_timeout_per_gb = 60 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.039368] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] report_interval = 120 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.039529] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] rescue_timeout = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.039689] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] reserved_host_cpus = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.039877] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] reserved_host_disk_mb = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.040053] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c 
None None] reserved_host_memory_mb = 512 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.040219] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] reserved_huge_pages = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.040379] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] resize_confirm_window = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.040539] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] resize_fs_using_block_device = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.040698] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] resume_guests_state_on_host_boot = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.040894] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.041076] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] rpc_response_timeout = 60 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.041241] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] run_external_periodic_tasks = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.041411] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] running_deleted_instance_action = reap {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.041572] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] running_deleted_instance_poll_interval = 1800 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.041730] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] running_deleted_instance_timeout = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.041891] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] scheduler_instance_sync_interval = 120 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.042067] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] service_down_time = 720 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.042239] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] servicegroup_driver = db {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.042396] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] shell_completion = None {{(pid=61978) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.042557] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] shelved_offload_time = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.042719] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] shelved_poll_interval = 3600 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.042886] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] shutdown_timeout = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.043058] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] source_is_ipv6 = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.043221] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ssl_only = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.043468] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.043635] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] sync_power_state_interval = 600 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.043795] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] sync_power_state_pool_size = 1000 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.043963] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] syslog_log_facility = LOG_USER {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.044134] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] tempdir = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.044295] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] timeout_nbd = 10 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.044460] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] transport_url = **** {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.044621] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] update_resources_interval = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.044781] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] use_cow_images = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.044942] env[61978]: DEBUG oslo_service.service [None 
req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] use_eventlog = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.045109] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] use_journal = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.045270] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] use_json = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.045427] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] use_rootwrap_daemon = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.045584] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] use_stderr = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.045740] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] use_syslog = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.045894] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vcpu_pin_set = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.046069] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vif_plugging_is_fatal = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.046238] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vif_plugging_timeout = 300 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.046406] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] virt_mkfs = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.046567] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] volume_usage_poll_interval = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.046729] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] watch_log_file = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.046898] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] web = /usr/share/spice-html5 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.047090] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.047263] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61978) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.047429] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.047601] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_concurrency.disable_process_locking = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.047886] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.048075] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.048247] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.048415] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.048583] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.048749] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.048931] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.auth_strategy = keystone {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.049110] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.compute_link_prefix = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.049293] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.049466] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.dhcp_domain = novalocal {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.049633] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.enable_instance_password = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.049827] 
env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.glance_link_prefix = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.050009] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.050191] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.050356] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.instance_list_per_project_cells = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.050522] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.list_records_by_skipping_down_cells = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.050688] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.local_metadata_per_cell = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.050889] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.max_limit = 1000 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.051077] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.metadata_cache_expiration = 15 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.051258] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.neutron_default_tenant_id = default {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.051429] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.response_validation = warn {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.051601] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.use_neutron_default_nets = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.051777] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.051940] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.052118] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61978) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.052294] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.052463] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.vendordata_dynamic_targets = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.052627] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.vendordata_jsonfile_path = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.052809] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.053008] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.backend = dogpile.cache.memcached {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.053182] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.backend_argument = **** {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.053354] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.config_prefix = cache.oslo {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.053523] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.dead_timeout = 60.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.053687] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.debug_cache_backend = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.053846] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.enable_retry_client = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.054013] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.enable_socket_keepalive = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.054187] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.enabled = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.054353] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.enforce_fips_mode = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.054516] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.expiration_time = 600 {{(pid=61978) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.054678] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.hashclient_retry_attempts = 2 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.054844] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.hashclient_retry_delay = 1.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.055024] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.memcache_dead_retry = 300 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.055182] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.memcache_password = **** {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.055343] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.055505] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.055665] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.memcache_pool_maxsize = 10 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.055829] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.055990] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.memcache_sasl_enabled = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.056182] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.056350] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.memcache_socket_timeout = 1.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.056509] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.memcache_username = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.056678] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.proxies = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.056842] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.redis_db = 0 {{(pid=61978) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.057007] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.redis_password = **** {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.057183] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.redis_sentinel_service_name = mymaster {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.057356] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.057522] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.redis_server = localhost:6379 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.057688] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.redis_socket_timeout = 1.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.057846] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.redis_username = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.058025] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.retry_attempts = 2 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.058192] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.retry_delay = 0.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.058353] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.socket_keepalive_count = 1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.058515] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.socket_keepalive_idle = 1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.058677] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.socket_keepalive_interval = 1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.058837] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.tls_allowed_ciphers = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.059008] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.tls_cafile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.059171] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.tls_certfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
456.059332] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.tls_enabled = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.059489] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cache.tls_keyfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.059661] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cinder.auth_section = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.059861] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cinder.auth_type = password {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.060050] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cinder.cafile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.060232] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cinder.catalog_info = volumev3::publicURL {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.060394] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cinder.certfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.060560] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cinder.collect_timing = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.060739] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cinder.cross_az_attach = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.060945] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cinder.debug = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.061132] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cinder.endpoint_template = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.061300] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cinder.http_retries = 3 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.061466] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cinder.insecure = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.061625] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cinder.keyfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.061798] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cinder.os_region_name = RegionOne 
{{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.061968] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cinder.split_loggers = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.062140] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cinder.timeout = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.062312] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.062471] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] compute.cpu_dedicated_set = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.062630] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] compute.cpu_shared_set = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.062793] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] compute.image_type_exclude_list = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.062979] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.063174] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] compute.max_concurrent_disk_ops = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.063340] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] compute.max_disk_devices_to_attach = -1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.063504] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.063675] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.063844] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] compute.resource_provider_association_refresh = 300 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.064017] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.064183] env[61978]: DEBUG oslo_service.service [None 
req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] compute.shutdown_retry_interval = 10 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.064363] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.064539] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] conductor.workers = 2 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.064720] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] console.allowed_origins = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.064881] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] console.ssl_ciphers = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.065069] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] console.ssl_minimum_version = default {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.065243] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] consoleauth.enforce_session_timeout = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.065413] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] consoleauth.token_ttl = 600 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.065579] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cyborg.cafile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.065737] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cyborg.certfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.065900] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cyborg.collect_timing = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.066069] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cyborg.connect_retries = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.066232] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cyborg.connect_retry_delay = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.066390] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cyborg.endpoint_override = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.066551] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] 
cyborg.insecure = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.066707] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cyborg.keyfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.066866] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cyborg.max_version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.067031] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cyborg.min_version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.067192] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cyborg.region_name = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.067352] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cyborg.retriable_status_codes = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.067507] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cyborg.service_name = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.067676] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cyborg.service_type = accelerator {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.067838] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cyborg.split_loggers = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.068008] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cyborg.status_code_retries = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.068174] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cyborg.status_code_retry_delay = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.068336] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cyborg.timeout = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.068514] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.068675] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] cyborg.version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.068855] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] database.backend = sqlalchemy {{(pid=61978) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.069036] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] database.connection = **** {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.069207] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] database.connection_debug = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.069378] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] database.connection_parameters = {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.069542] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] database.connection_recycle_time = 3600 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.069705] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] database.connection_trace = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.069897] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] database.db_inc_retry_interval = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.070076] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] database.db_max_retries = 20 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.070245] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] database.db_max_retry_interval = 10 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.070410] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] database.db_retry_interval = 1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.070575] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] database.max_overflow = 50 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.070783] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] database.max_pool_size = 5 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.071012] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] database.max_retries = 10 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.071201] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.071367] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] database.mysql_wsrep_sync_wait = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} 
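Editor's note: every entry above is emitted by oslo.config's `ConfigOpts.log_opt_values` (the `log_opt_values ... cfg.py:2826` tag on each record), which walks all registered option groups and logs one `group.option = value` line per option, masking any option registered with `secret=True` as `****` (hence `database.connection = ****`, `cache.memcache_password = ****`, and similar). As a minimal sketch of that mechanism only, the following standalone Python reproduces the same style of DEBUG dump; the option names, defaults, and the `demo` project name are illustrative placeholders, not Nova's actual option definitions.

    import logging
    from oslo_config import cfg

    LOG = logging.getLogger(__name__)

    # Illustrative options loosely mirroring a few [database] values seen in
    # the dump above; these are placeholders, not Nova's registered options.
    opts = [
        cfg.StrOpt('mysql_sql_mode', default='TRADITIONAL'),
        cfg.IntOpt('max_pool_size', default=5),
        cfg.IntOpt('max_overflow', default=50),
        # secret=True is what causes a value to be logged as '****'
        cfg.StrOpt('connection', secret=True),
    ]

    CONF = cfg.CONF
    CONF.register_opts(opts, group='database')

    if __name__ == '__main__':
        logging.basicConfig(level=logging.DEBUG)
        CONF([], project='demo')  # parse an empty command line so options are readable
        # Emits one DEBUG line per registered option, e.g.
        #   database.max_pool_size = 5
        #   database.connection    = ****
        CONF.log_opt_values(LOG, logging.DEBUG)

The values shown in the dump correspond to the service's effective configuration (defaults plus whatever nova.conf overrides were loaded), grouped by the config-file section they belong to.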
[ 456.071527] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] database.pool_timeout = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.071692] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] database.retry_interval = 10 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.071852] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] database.slave_connection = **** {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.072028] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] database.sqlite_synchronous = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.072197] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] database.use_db_reconnect = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.072376] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api_database.backend = sqlalchemy {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.072547] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api_database.connection = **** {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.072716] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api_database.connection_debug = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.072885] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api_database.connection_parameters = {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.073059] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api_database.connection_recycle_time = 3600 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.073224] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api_database.connection_trace = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.073386] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api_database.db_inc_retry_interval = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.073549] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api_database.db_max_retries = 20 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.073711] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api_database.db_max_retry_interval = 10 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.073874] env[61978]: DEBUG 
oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api_database.db_retry_interval = 1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.074046] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api_database.max_overflow = 50 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.074210] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api_database.max_pool_size = 5 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.074373] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api_database.max_retries = 10 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.074544] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.074704] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.074862] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api_database.pool_timeout = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.075066] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api_database.retry_interval = 10 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.075238] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api_database.slave_connection = **** {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.075403] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] api_database.sqlite_synchronous = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.075580] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] devices.enabled_mdev_types = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.075759] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.075935] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ephemeral_storage_encryption.default_format = luks {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.076111] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ephemeral_storage_encryption.enabled = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.076276] env[61978]: 
DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.076446] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.api_servers = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.076610] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.cafile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.076772] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.certfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.076937] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.collect_timing = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.077111] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.connect_retries = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.077274] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.connect_retry_delay = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.077436] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.debug = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.077600] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.default_trusted_certificate_ids = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.077788] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.enable_certificate_validation = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.077956] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.enable_rbd_download = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.078128] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.endpoint_override = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.078294] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.insecure = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.078470] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.keyfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.078674] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] 
glance.max_version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.078841] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.min_version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.079018] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.num_retries = 3 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.079203] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.rbd_ceph_conf = {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.079367] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.rbd_connect_timeout = 5 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.079535] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.rbd_pool = {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.079702] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.rbd_user = {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.079891] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.region_name = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.080069] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.retriable_status_codes = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.080235] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.service_name = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.080405] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.service_type = image {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.080570] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.split_loggers = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.080736] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.status_code_retries = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.080916] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.status_code_retry_delay = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.081095] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.timeout = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.081279] 
env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.081444] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.verify_glance_signatures = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.081603] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] glance.version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.081770] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] guestfs.debug = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.081935] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] mks.enabled = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.082292] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.082481] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] image_cache.manager_interval = 2400 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.082652] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] image_cache.precache_concurrency = 1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.082822] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] image_cache.remove_unused_base_images = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.082995] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.083180] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.083361] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] image_cache.subdirectory_name = _base {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.083537] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.api_max_retries = 60 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.083699] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.api_retry_interval = 2 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
456.083860] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.auth_section = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.084035] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.auth_type = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.084199] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.cafile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.084356] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.certfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.084517] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.collect_timing = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.084679] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.conductor_group = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.084839] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.connect_retries = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.084997] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.connect_retry_delay = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.085169] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.endpoint_override = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.085330] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.insecure = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.085486] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.keyfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.085645] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.max_version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.085803] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.min_version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.086121] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.peer_list = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.086121] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.region_name = None {{(pid=61978) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.086288] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.retriable_status_codes = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.086450] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.serial_console_state_timeout = 10 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.086608] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.service_name = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.086774] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.service_type = baremetal {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.086938] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.shard = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.087145] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.split_loggers = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.087314] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.status_code_retries = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.087475] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.status_code_retry_delay = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.087650] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.timeout = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.087848] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.088020] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ironic.version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.088205] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.088378] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] key_manager.fixed_key = **** {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.088558] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61978) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.088719] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican.barbican_api_version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.088878] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican.barbican_endpoint = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.089067] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican.barbican_endpoint_type = public {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.089234] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican.barbican_region_name = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.089389] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican.cafile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.089545] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican.certfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.089705] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican.collect_timing = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.089895] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican.insecure = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.090071] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican.keyfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.090235] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican.number_of_retries = 60 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.090397] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican.retry_delay = 1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.090561] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican.send_service_user_token = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.090727] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican.split_loggers = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.090903] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican.timeout = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.091086] 
env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican.verify_ssl = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.091249] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican.verify_ssl_path = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.091416] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican_service_user.auth_section = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.091581] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican_service_user.auth_type = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.091741] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican_service_user.cafile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.091901] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican_service_user.certfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.092078] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican_service_user.collect_timing = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.092242] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican_service_user.insecure = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.092398] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican_service_user.keyfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.092560] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican_service_user.split_loggers = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.092719] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] barbican_service_user.timeout = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.092885] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vault.approle_role_id = **** {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.093056] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vault.approle_secret_id = **** {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.093231] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vault.kv_mountpoint = secret {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.093388] env[61978]: DEBUG 
oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vault.kv_path = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.093549] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vault.kv_version = 2 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.093705] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vault.namespace = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.093859] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vault.root_token_id = **** {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.094023] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vault.ssl_ca_crt_file = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.094190] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vault.timeout = 60.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.094348] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vault.use_ssl = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.094513] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.094680] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.auth_section = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.094841] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.auth_type = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.095013] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.cafile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.095176] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.certfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.095337] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.collect_timing = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.095494] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.connect_retries = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.095653] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.connect_retry_delay = None {{(pid=61978) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.095809] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.endpoint_override = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.095972] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.insecure = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.096140] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.keyfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.096297] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.max_version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.096452] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.min_version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.096606] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.region_name = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.096762] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.retriable_status_codes = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.096917] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.service_name = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.097094] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.service_type = identity {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.097257] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.split_loggers = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.097413] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.status_code_retries = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.097572] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.status_code_retry_delay = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.097759] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.timeout = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.097950] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
456.098123] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] keystone.version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.098325] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.connection_uri = {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.098488] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.cpu_mode = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.098652] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.cpu_model_extra_flags = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.098817] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.cpu_models = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.098994] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.cpu_power_governor_high = performance {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.099209] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.cpu_power_governor_low = powersave {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.099378] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.cpu_power_management = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.099547] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.099709] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.device_detach_attempts = 8 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.099895] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.device_detach_timeout = 20 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.100077] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.disk_cachemodes = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.100240] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.disk_prefix = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.100403] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.enabled_perf_events = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.100566] env[61978]: DEBUG oslo_service.service [None 
req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.file_backed_memory = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.100733] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.gid_maps = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.100892] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.hw_disk_discard = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.101057] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.hw_machine_type = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.101229] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.images_rbd_ceph_conf = {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.101393] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.101554] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.101720] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.images_rbd_glance_store_name = {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.101886] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.images_rbd_pool = rbd {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.102068] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.images_type = default {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.102230] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.images_volume_group = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.102392] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.inject_key = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.102551] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.inject_partition = -2 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.102708] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.inject_password = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.102868] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] 
libvirt.iscsi_iface = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.103039] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.iser_use_multipath = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.103206] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.live_migration_bandwidth = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.103367] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.103525] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.live_migration_downtime = 500 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.103685] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.103843] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.104008] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.live_migration_inbound_addr = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.104185] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.104350] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.live_migration_permit_post_copy = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.104510] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.live_migration_scheme = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.104683] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.live_migration_timeout_action = abort {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.104845] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.live_migration_tunnelled = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.105013] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.live_migration_uri = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.105174] env[61978]: DEBUG oslo_service.service [None 
req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.live_migration_with_native_tls = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.105332] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.max_queues = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.105490] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.105719] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.105881] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.nfs_mount_options = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.106484] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.106674] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.106849] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.num_iser_scan_tries = 5 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.107028] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.num_memory_encrypted_guests = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.107203] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.107371] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.num_pcie_ports = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.107541] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.num_volume_scan_tries = 5 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.107709] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.pmem_namespaces = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.107871] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.quobyte_client_cfg = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.108170] env[61978]: DEBUG oslo_service.service [None 
req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.108348] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.rbd_connect_timeout = 5 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.108516] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.108680] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.108844] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.rbd_secret_uuid = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.109013] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.rbd_user = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.109183] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.109356] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.remote_filesystem_transport = ssh {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.109519] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.rescue_image_id = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.109682] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.rescue_kernel_id = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.109841] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.rescue_ramdisk_id = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.110022] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.110187] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.rx_queue_size = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.110358] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.smbfs_mount_options = {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.110630] env[61978]: DEBUG oslo_service.service [None 
req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.110804] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.snapshot_compression = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.110997] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.snapshot_image_format = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.111241] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.111414] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.sparse_logical_volumes = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.111580] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.swtpm_enabled = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.111751] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.swtpm_group = tss {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.111924] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.swtpm_user = tss {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.112107] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.sysinfo_serial = unique {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.112270] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.tb_cache_size = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.112429] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.tx_queue_size = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.112596] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.uid_maps = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.112758] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.use_virtio_for_bridges = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.112931] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.virt_type = kvm {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.113112] env[61978]: DEBUG oslo_service.service [None 
req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.volume_clear = zero {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.113279] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.volume_clear_size = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.113445] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.volume_use_multipath = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.113605] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.vzstorage_cache_path = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.113773] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.113944] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.vzstorage_mount_group = qemu {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.114123] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.vzstorage_mount_opts = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.114290] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.114563] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.114752] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.vzstorage_mount_user = stack {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.115024] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.115224] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.auth_section = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.115403] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.auth_type = password {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.115570] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.cafile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.115730] env[61978]: DEBUG oslo_service.service 
[None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.certfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.115895] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.collect_timing = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.116069] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.connect_retries = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.116235] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.connect_retry_delay = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.116408] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.default_floating_pool = public {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.116569] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.endpoint_override = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.116731] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.extension_sync_interval = 600 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.116895] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.http_retries = 3 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.117066] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.insecure = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.117226] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.keyfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.117385] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.max_version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.117553] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.117714] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.min_version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.117882] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.ovs_bridge = br-int {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.118059] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.physnets = [] {{(pid=61978) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.118232] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.region_name = RegionOne {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.118392] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.retriable_status_codes = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.118560] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.service_metadata_proxy = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.118724] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.service_name = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.118896] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.service_type = network {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.119091] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.split_loggers = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.119268] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.status_code_retries = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.119429] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.status_code_retry_delay = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.119586] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.timeout = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.119785] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.119962] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] neutron.version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.120222] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] notifications.bdms_in_notifications = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.120403] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] notifications.default_level = INFO {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.120577] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] notifications.notification_format = unversioned {{(pid=61978) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.120745] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] notifications.notify_on_state_change = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.120943] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.121139] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] pci.alias = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.121311] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] pci.device_spec = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.121478] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] pci.report_in_placement = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.121650] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.auth_section = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.121823] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.auth_type = password {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.121992] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.122168] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.cafile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.122324] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.certfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.122486] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.collect_timing = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.122646] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.connect_retries = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.122803] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.connect_retry_delay = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.122979] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.default_domain_id = None {{(pid=61978) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.123153] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.default_domain_name = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.123310] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.domain_id = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.123466] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.domain_name = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.123623] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.endpoint_override = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.123783] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.insecure = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.123939] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.keyfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.124107] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.max_version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.124266] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.min_version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.124431] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.password = **** {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.124589] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.project_domain_id = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.124755] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.project_domain_name = Default {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.124927] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.project_id = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.125111] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.project_name = service {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.125285] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.region_name = RegionOne {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.125449] 
env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.retriable_status_codes = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.125608] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.service_name = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.125776] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.service_type = placement {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.125939] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.split_loggers = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.126110] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.status_code_retries = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.126273] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.status_code_retry_delay = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.126435] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.system_scope = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.126596] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.timeout = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.126753] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.trust_id = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.126911] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.user_domain_id = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.127089] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.user_domain_name = Default {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.127251] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.user_id = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.127422] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.username = nova {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.127598] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.127758] env[61978]: DEBUG oslo_service.service [None 
req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] placement.version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.127937] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] quota.cores = 20 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.128116] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] quota.count_usage_from_placement = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.128287] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.128459] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] quota.injected_file_content_bytes = 10240 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.128626] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] quota.injected_file_path_length = 255 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.128795] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] quota.injected_files = 5 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.128963] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] quota.instances = 10 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.129145] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] quota.key_pairs = 100 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.129309] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] quota.metadata_items = 128 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.129473] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] quota.ram = 51200 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.129635] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] quota.recheck_quota = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.129816] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] quota.server_group_members = 10 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.129989] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] quota.server_groups = 10 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.130172] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61978) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.130337] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.130497] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] scheduler.image_metadata_prefilter = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.130656] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.130826] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] scheduler.max_attempts = 3 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.131020] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] scheduler.max_placement_results = 1000 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.131184] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.131346] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] scheduler.query_placement_for_image_type_support = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.131505] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.131676] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] scheduler.workers = 2 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.131847] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.132029] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.132210] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.132377] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.132542] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.132706] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.132879] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.133096] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.133272] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.host_subset_size = 1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.133435] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.133592] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.133755] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.133921] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.isolated_hosts = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.134092] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.isolated_images = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.134255] env[61978]: DEBUG oslo_service.service [None 
req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.134413] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.134574] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.134734] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.pci_in_placement = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.134897] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.135068] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.135230] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.135389] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.135551] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.135712] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.135872] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.track_instance_changes = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.136064] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.136235] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] metrics.required = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.136399] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] metrics.weight_multiplier = 1.0 
{{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.136561] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.136724] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] metrics.weight_setting = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.137037] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.137215] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] serial_console.enabled = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.137390] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] serial_console.port_range = 10000:20000 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.137564] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.137731] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.137900] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] serial_console.serialproxy_port = 6083 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.138079] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] service_user.auth_section = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.138254] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] service_user.auth_type = password {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.138412] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] service_user.cafile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.138572] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] service_user.certfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.138757] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] service_user.collect_timing = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.138932] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] service_user.insecure = False {{(pid=61978) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.139104] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] service_user.keyfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.139275] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] service_user.send_service_user_token = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.139436] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] service_user.split_loggers = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.139608] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] service_user.timeout = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.139801] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] spice.agent_enabled = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.139978] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] spice.enabled = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.140311] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.140512] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.140686] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] spice.html5proxy_port = 6082 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.140865] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] spice.image_compression = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.141078] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] spice.jpeg_compression = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.141251] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] spice.playback_compression = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.141415] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] spice.require_secure = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.141585] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] spice.server_listen = 127.0.0.1 {{(pid=61978) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.141755] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.141915] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] spice.streaming_mode = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.142088] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] spice.zlib_compression = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.142256] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] upgrade_levels.baseapi = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.142426] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] upgrade_levels.compute = auto {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.142585] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] upgrade_levels.conductor = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.142741] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] upgrade_levels.scheduler = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.142907] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vendordata_dynamic_auth.auth_section = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.143079] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vendordata_dynamic_auth.auth_type = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.143238] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vendordata_dynamic_auth.cafile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.143394] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vendordata_dynamic_auth.certfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.143552] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.143714] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vendordata_dynamic_auth.insecure = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.143866] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vendordata_dynamic_auth.keyfile = None {{(pid=61978) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.144089] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.144272] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vendordata_dynamic_auth.timeout = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.144447] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.api_retry_count = 10 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.144609] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.ca_file = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.144782] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.cache_prefix = devstack-image-cache {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.144955] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.cluster_name = testcl1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.145135] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.connection_pool_size = 10 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.145294] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.console_delay_seconds = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.145463] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.datastore_regex = ^datastore.* {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.145665] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.145841] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.host_password = **** {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.146017] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.host_port = 443 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.146193] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.host_username = administrator@vsphere.local {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.146361] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.insecure = True {{(pid=61978) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.146524] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.integration_bridge = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.146691] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.maximum_objects = 100 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.146850] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.pbm_default_policy = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.147023] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.pbm_enabled = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.147180] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.pbm_wsdl_location = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.147349] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.147507] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.serial_port_proxy_uri = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.147666] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.serial_port_service_uri = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.147832] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.task_poll_interval = 0.5 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.148013] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.use_linked_clone = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.148371] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.vnc_keymap = en-us {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.148371] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.vnc_port = 5900 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.148498] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vmware.vnc_port_total = 10000 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.148679] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vnc.auth_schemes = ['none'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.148852] 
env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vnc.enabled = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.149163] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.149352] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.149525] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vnc.novncproxy_port = 6080 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.149696] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vnc.server_listen = 127.0.0.1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.149897] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.150079] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vnc.vencrypt_ca_certs = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.150271] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vnc.vencrypt_client_cert = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.150446] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vnc.vencrypt_client_key = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.150622] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.150788] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] workarounds.disable_deep_image_inspection = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.150978] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.151162] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.151323] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61978) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.151487] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] workarounds.disable_rootwrap = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.151649] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] workarounds.enable_numa_live_migration = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.151810] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.151974] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.152148] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.152308] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] workarounds.libvirt_disable_apic = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.152466] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.152628] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.152787] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.152950] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.153122] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.153282] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.153445] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.153602] 
env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.153761] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.153934] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.154126] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.154298] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] wsgi.client_socket_timeout = 900 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.154468] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] wsgi.default_pool_size = 1000 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.154634] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] wsgi.keep_alive = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.154805] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] wsgi.max_header_line = 16384 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.154970] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] wsgi.secure_proxy_ssl_header = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.155145] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] wsgi.ssl_ca_file = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.155306] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] wsgi.ssl_cert_file = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.155469] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] wsgi.ssl_key_file = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.155630] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] wsgi.tcp_keepidle = 600 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.155801] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61978) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.155978] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] zvm.ca_file = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.156172] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] zvm.cloud_connector_url = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.156451] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.156625] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] zvm.reachable_timeout = 300 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.156805] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_policy.enforce_new_defaults = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.157213] env[61978]: WARNING oslo_config.cfg [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
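Every DEBUG entry in this startup dump is produced by the same oslo.config helper, ConfigOpts.log_opt_values() (the cfg.py:2826 frame repeated in each line), invoked here through oslo.service (logger name oslo_service.service) when nova-compute starts: it walks every registered option group and logs one "group.option = value" line per option, printing secret options as **** (as seen for vmware.host_password and profiler.hmac_keys above). Below is a minimal, self-contained sketch of that mechanism only; the option names and defaults are registered by hand for illustration rather than through nova's real option modules.

```python
# Minimal sketch (not part of the captured log): how oslo.config emits the
# "group.option = value" DEBUG lines above via ConfigOpts.log_opt_values().
# The options below are illustrative stand-ins, registered directly instead
# of through nova's option modules.
import logging

from oslo_config import cfg

CONF = cfg.ConfigOpts()
CONF.register_opts(
    [
        cfg.BoolOpt('enforce_scope', default=True),
        cfg.BoolOpt('enforce_new_defaults', default=True),
    ],
    group='oslo_policy',
)
# Options registered with secret=True are logged as '****', which is why
# vmware.host_password and profiler.hmac_keys appear masked in the dump.
CONF.register_opts(
    [cfg.StrOpt('host_password', secret=True, default='example')],
    group='vmware',
)

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger('demo')

CONF([])                                 # parse an empty argv; defaults apply
CONF.log_opt_values(LOG, logging.DEBUG)  # one DEBUG line per registered option
```

Running the sketch prints a miniature version of the dump above (one DEBUG line per registered option); in the real service the full dump is simply the same call made over every option group nova registers.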
[ 456.157405] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_policy.enforce_scope = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.157586] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_policy.policy_default_rule = default {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.157772] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.157951] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_policy.policy_file = policy.yaml {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.158138] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.158305] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.158462] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.158619] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.158783] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.158952] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.159140] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.159316] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] profiler.connection_string = messaging:// {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.159484] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] profiler.enabled = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.159654] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] profiler.es_doc_type = notification 
{{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.159855] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] profiler.es_scroll_size = 10000 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.160054] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] profiler.es_scroll_time = 2m {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.160225] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] profiler.filter_error_trace = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.160395] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] profiler.hmac_keys = **** {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.160561] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] profiler.sentinel_service_name = mymaster {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.160733] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] profiler.socket_timeout = 0.1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.160926] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] profiler.trace_requests = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.161106] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] profiler.trace_sqlalchemy = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.161285] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] profiler_jaeger.process_tags = {} {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.161446] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] profiler_jaeger.service_name_prefix = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.161609] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] profiler_otlp.service_name_prefix = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.161776] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] remote_debug.host = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.161940] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] remote_debug.port = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.162158] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61978) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.162338] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.162506] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.162670] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.162835] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.163015] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.163174] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.163335] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.163495] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.163665] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.163824] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.163993] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.164173] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.164343] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.164515] 
env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.164681] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.164843] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.165097] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.165291] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.165466] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.165638] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.165803] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.165971] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.166149] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.166312] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.166474] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.166636] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.166796] env[61978]: DEBUG oslo_service.service [None 
req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.166967] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.167146] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.ssl = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.167321] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.167494] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.167656] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.167827] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.168016] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.ssl_version = {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.168207] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.168397] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.168568] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_notifications.retry = -1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.168753] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.168963] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_messaging_notifications.transport_url = **** {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.169170] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.auth_section = None {{(pid=61978) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.169337] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.auth_type = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.169498] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.cafile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.169659] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.certfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.169852] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.collect_timing = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.170037] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.connect_retries = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.170203] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.connect_retry_delay = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.170366] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.endpoint_id = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.170521] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.endpoint_override = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.170680] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.insecure = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.170837] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.keyfile = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.170992] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.max_version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.171159] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.min_version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.171313] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.region_name = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.171471] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.retriable_status_codes = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.171626] 
env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.service_name = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.171780] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.service_type = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.171941] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.split_loggers = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.172123] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.status_code_retries = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.172294] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.status_code_retry_delay = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.172449] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.timeout = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.172605] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.valid_interfaces = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.172759] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_limit.version = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.172924] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_reports.file_event_handler = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.173097] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.173259] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] oslo_reports.log_dir = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.173429] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.173587] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.173744] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.173910] 
env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.174087] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.174248] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.174417] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.174574] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vif_plug_ovs_privileged.group = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.174732] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.174899] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.175070] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.175232] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] vif_plug_ovs_privileged.user = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.175400] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] os_vif_linux_bridge.flat_interface = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.175574] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.175747] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.175920] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.176105] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.176279] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.176444] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.176607] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.176785] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.176956] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] os_vif_ovs.isolate_vif = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.177143] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.177310] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.177478] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.177646] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] os_vif_ovs.ovsdb_interface = native {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.177807] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] os_vif_ovs.per_port_bridge = False {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.177975] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] privsep_osbrick.capabilities = [21] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.178145] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] privsep_osbrick.group = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.178302] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] privsep_osbrick.helper_command = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.178466] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None 
None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.178628] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.178785] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] privsep_osbrick.user = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.178963] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.179125] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] nova_sys_admin.group = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.179283] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] nova_sys_admin.helper_command = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.179444] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.179604] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.179789] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] nova_sys_admin.user = None {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.179913] env[61978]: DEBUG oslo_service.service [None req-7ecd1122-5282-4372-91d8-b46f0ba15e0c None None] ******************************************************************************** {{(pid=61978) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 456.180392] env[61978]: INFO nova.service [-] Starting compute node (version 0.1.0) [ 456.683339] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Getting list of instances from cluster (obj){ [ 456.683339] env[61978]: value = "domain-c8" [ 456.683339] env[61978]: _type = "ClusterComputeResource" [ 456.683339] env[61978]: } {{(pid=61978) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 456.684503] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c4f49e-6954-4a8b-91d2-7f9d166b288d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 456.694016] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Got total of 0 instances {{(pid=61978) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 456.694602] env[61978]: WARNING nova.virt.vmwareapi.driver [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] 
The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 456.695134] env[61978]: INFO nova.virt.node [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Generated node identity 44209228-3464-48ae-bc40-83eccd44b0cf [ 456.695386] env[61978]: INFO nova.virt.node [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Wrote node identity 44209228-3464-48ae-bc40-83eccd44b0cf to /opt/stack/data/n-cpu-1/compute_id [ 457.198359] env[61978]: WARNING nova.compute.manager [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Compute nodes ['44209228-3464-48ae-bc40-83eccd44b0cf'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 458.204460] env[61978]: INFO nova.compute.manager [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 459.210280] env[61978]: WARNING nova.compute.manager [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 459.210602] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 459.210749] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 459.210871] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 459.211035] env[61978]: DEBUG nova.compute.resource_tracker [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 459.211990] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-774086cc-45eb-400b-9147-4cc18e29685b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 459.220538] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e04bb38-7728-4781-97ef-9e2d670a3629 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 459.233767] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-44a56c5a-23c0-4890-bd7c-dfc0c75ed596 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 459.240273] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0313dcaa-cda8-4d26-8461-190db211dd86 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 459.268532] env[61978]: DEBUG nova.compute.resource_tracker [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181510MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 459.268704] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 459.268867] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 459.771789] env[61978]: WARNING nova.compute.resource_tracker [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] No compute node record for cpu-1:44209228-3464-48ae-bc40-83eccd44b0cf: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 44209228-3464-48ae-bc40-83eccd44b0cf could not be found. [ 460.275397] env[61978]: INFO nova.compute.resource_tracker [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 44209228-3464-48ae-bc40-83eccd44b0cf [ 461.783441] env[61978]: DEBUG nova.compute.resource_tracker [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 461.783722] env[61978]: DEBUG nova.compute.resource_tracker [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 461.938417] env[61978]: INFO nova.scheduler.client.report [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] [req-0e6c0f01-e9bf-480f-9d89-5a90fb1ad129] Created resource provider record via placement API for resource provider with UUID 44209228-3464-48ae-bc40-83eccd44b0cf and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
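[editor's note] The resource-tracker audit above reports the node totals (48 vCPUs, 196590 MB RAM, 200 GB disk with 512 MB reserved), and the entries that follow show the inventory record pushed to the Placement API together with allocation ratios and max_unit limits. As a rough aid to reading those inventory dicts: Placement treats the schedulable capacity of each resource class as (total - reserved) * allocation_ratio, with max_unit bounding any single allocation. The snippet below is a minimal illustrative sketch using the values from this log, not Nova's or Placement's actual code; the effective_capacity helper is a made-up name for illustration only.

    # Illustrative only: derive schedulable capacity from an inventory record
    # shaped like the one logged below for provider 44209228-3464-48ae-bc40-83eccd44b0cf.
    INVENTORY = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 186},
    }

    def effective_capacity(inventory):
        """Schedulable capacity per resource class: (total - reserved) * allocation_ratio."""
        return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
                for rc, v in inventory.items()}

    print(effective_capacity(INVENTORY))
    # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}

Under that reading, the 4.0 VCPU allocation ratio explains why this 48-vCPU cluster can accept up to 192 vCPUs of allocations, while max_unit=16 caps any single instance; the DISK_GB inventory reports total=400 but max_unit=186, matching the free_disk=186GB seen in the hypervisor resource view.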
[ 461.955085] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e412cdf-962d-4fb4-bcf7-caa0cbfa1bba {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 461.962591] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc9ee9e5-98d1-4ad7-aeea-db3ffca5cb6e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 461.991779] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3687ce-12c4-46cd-a948-d950a20c61dc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 461.998637] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d9c604-4990-436d-b95b-0222f8b347bc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 462.011753] env[61978]: DEBUG nova.compute.provider_tree [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 462.549304] env[61978]: DEBUG nova.scheduler.client.report [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Updated inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 462.549538] env[61978]: DEBUG nova.compute.provider_tree [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Updating resource provider 44209228-3464-48ae-bc40-83eccd44b0cf generation from 0 to 1 during operation: update_inventory {{(pid=61978) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 462.549679] env[61978]: DEBUG nova.compute.provider_tree [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 462.607487] env[61978]: DEBUG nova.compute.provider_tree [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Updating 
resource provider 44209228-3464-48ae-bc40-83eccd44b0cf generation from 1 to 2 during operation: update_traits {{(pid=61978) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 463.111994] env[61978]: DEBUG nova.compute.resource_tracker [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 463.112344] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.843s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 463.112344] env[61978]: DEBUG nova.service [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Creating RPC server for service compute {{(pid=61978) start /opt/stack/nova/nova/service.py:186}} [ 463.126820] env[61978]: DEBUG nova.service [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] Join ServiceGroup membership for this service compute {{(pid=61978) start /opt/stack/nova/nova/service.py:203}} [ 463.127022] env[61978]: DEBUG nova.servicegroup.drivers.db [None req-7524f73a-d102-4160-8fd4-1be5c2f474e9 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61978) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 463.127682] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._sync_power_states {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 463.631055] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Getting list of instances from cluster (obj){ [ 463.631055] env[61978]: value = "domain-c8" [ 463.631055] env[61978]: _type = "ClusterComputeResource" [ 463.631055] env[61978]: } {{(pid=61978) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 463.632281] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76218c49-3914-442e-bd12-89aa26e1bf64 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 463.640622] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Got total of 0 instances {{(pid=61978) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 463.640846] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 463.641157] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Getting list of instances from cluster (obj){ [ 463.641157] env[61978]: value = "domain-c8" [ 463.641157] env[61978]: _type = "ClusterComputeResource" [ 463.641157] env[61978]: } {{(pid=61978) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 463.641983] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed0bd877-952e-46f6-ae59-6c987388457e 
{{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 463.648836] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Got total of 0 instances {{(pid=61978) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 511.565069] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 511.565655] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 511.565859] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 511.566095] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 512.068950] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 512.069255] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 512.069500] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 512.069686] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 512.069872] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 512.070140] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 512.070364] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 512.070573] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] 
CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 512.070733] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 512.574072] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 512.574072] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 512.574475] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 512.574475] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 512.575350] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90435ef-6e78-4092-9b87-491cc6007fec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.583525] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd2e13f-ad14-4fc5-95ad-910c3aca8950 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.596740] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54db460b-07ed-411a-87e7-de8832e684bb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.602971] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6e4306-a4ef-4471-8d2a-f48b5e7d7235 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.631567] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181511MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 512.631717] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 512.631878] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 513.649449] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 513.649449] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 513.663691] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4564a54e-f3d0-435b-bdac-c0e805b5e11d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.670701] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ccc38b0-5d86-4029-8920-acd593653928 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.698608] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba97eb7-3278-4722-8b2c-00435fdfa847 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.706912] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d37c89-b69c-45a8-92e3-b7142f18e5da {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.720395] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 514.223353] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 514.728590] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 514.728953] env[61978]: DEBUG oslo_concurrency.lockutils [None 
req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.097s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.715662] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 574.716074] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.223967] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.224178] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 575.224178] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 575.729164] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. 
{{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 575.729512] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.729568] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.729667] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.729811] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.729948] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.730111] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 575.730238] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 575.730387] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 576.234365] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.234537] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.234702] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 576.234854] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 576.235780] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c33fa52-b280-4651-a801-187a3b06131c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.244115] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfbc9ab1-34f2-43be-88f0-2604c4956a3b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.257503] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb2fa11-f1f8-4315-addc-cf0cb2c86b87 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.263570] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f982bf-33ba-4846-bb95-38e9a375a35d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.291064] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181505MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 576.291216] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.291408] 
env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.309839] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 577.309839] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 577.322172] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e3579ad-2ae1-4199-82cc-adcda41a372d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.329898] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc21cc44-59da-4633-8e09-3128926a750c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.357634] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f026959-3026-48ca-8e73-fbf6dd8500ac {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.363969] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50892a96-ff90-4efc-bc9a-94ab1759c4fd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.376752] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 577.879480] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 577.880702] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 577.880884] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.589s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.882481] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.882760] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 637.882913] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 637.883069] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 638.386344] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 638.386562] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.386721] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.386867] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.387090] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.387261] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.387408] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.387538] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 638.387679] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.891051] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.891414] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.891507] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.891641] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 638.892617] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e0258f-a106-454e-bf67-3999fb3badf2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.900744] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-653ccd1b-3a6f-466b-8a49-4aa71c712440 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.914081] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f7969ad-4697-4364-8f1f-123d94927f01 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.920009] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a46dcb-015b-4b71-9a10-0812efe08477 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.948077] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181509MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 638.948218] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.948401] 
env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.967170] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 639.967480] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 639.980995] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c494e6da-efea-4ce6-9971-fee281ebc3b3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.988616] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db1f568-59b4-45e8-b610-dd9deab6d413 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.017230] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63603f7c-e969-4cae-b32d-ccf8303d4c67 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.024173] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdee4316-83b7-471d-9866-c35c3f778ee0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.036789] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 640.539691] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 640.540996] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 640.541190] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 694.211182] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 694.211567] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 694.716790] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 694.716974] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 694.717131] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 694.717291] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 695.222040] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.222435] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.222501] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.222642] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 695.223543] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98aa1c72-0bb7-454d-996a-4ef4cee7fafc {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.231676] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e58e6df-befb-4715-b84f-e064c6399511 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.245197] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d71066-583f-4df2-87d7-73041d9e7985 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.250986] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b73eba5-5be9-4d6d-921c-3c5fb8b6ac9e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.278118] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181510MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 695.278272] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.278463] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.295645] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 696.295946] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 696.308340] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d360e9bc-b450-40e0-9cd9-6df475b0ee4f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.315687] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f98d21-6ac0-49d8-8bc5-9d9b87853818 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.343612] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6e391c-bd9f-4235-b549-9d2334cbe3e1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.350406] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-332f0890-98a6-42bb-9146-2ba2e2127d3a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.362756] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 696.865736] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 696.867021] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 696.867205] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.589s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.706899] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 697.707320] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 697.707320] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 698.210532] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. 
{{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 698.210788] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 698.210927] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 698.211074] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 698.211207] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 751.556288] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.556683] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Cleaning up deleted instances {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 752.059746] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] There are 0 instances to clean {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 752.059987] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 752.060136] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Cleaning up deleted instances with incomplete migration {{(pid=61978) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 752.562706] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.061031] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.061328] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.061461] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None 
None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.061611] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.565259] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.565509] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.565674] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.565827] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 755.566825] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74309bf3-bfc8-4148-8649-0c83464249de {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.575226] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f9de9ed-6cb5-4776-896c-ff7217152060 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.589338] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2497dfa3-492d-4980-8f58-c5cb8b8d5355 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.595425] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a6fe61-5d65-4e40-88c2-b52614d9787d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.624017] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181498MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 755.624183] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.624358] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.641807] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 756.642102] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 756.655008] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a9fcc8a-5aed-4e39-87a0-1733aba4616c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.662734] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9aa9c31-e477-48d3-9971-1bcdd4b81af7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.692527] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc7f040-4248-486e-83c3-78f7d0fba005 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.699861] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77529727-6bc5-4c3f-b3c6-8d4ff265a814 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.713607] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 757.216898] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 757.218138] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 757.218357] env[61978]: DEBUG oslo_concurrency.lockutils [None 
req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.714661] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.715088] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 757.715088] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 758.218774] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 758.219126] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 758.219390] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 758.219633] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 758.219880] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 758.220104] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 814.557622] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 814.557938] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 815.061052] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.061665] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.061665] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.061665] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 815.062465] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc57b23-9166-478a-8734-78b88b2f40db {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.073055] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4278341-319d-42e4-a574-29a296f080fb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.092020] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a5c4eb-c2f8-4e8d-a568-944b1888eca0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.100147] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cab8f11-477e-471b-864e-463173a14835 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.133893] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181496MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 815.134206] env[61978]: DEBUG 
oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.134601] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.730706] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 816.730989] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 816.754524] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Refreshing inventories for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 816.771259] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Updating ProviderTree inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 816.771259] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 816.787451] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Refreshing aggregate associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, aggregates: None {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 816.817755] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Refreshing trait associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, traits: 
COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 816.837904] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84be86c6-78db-4c59-abac-a27041ef9322 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.847867] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-822391da-89be-4866-b57e-fc39c20eb8ba {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.883258] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2d5557-fc7a-459d-bc83-d4ed1ef1ac84 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.892031] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45fbb7c-8dea-4797-8904-cb00a84ff3fa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.906939] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.412396] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 817.412396] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 817.412396] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.278s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.406410] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 818.917477] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 818.917477] 
env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 818.917477] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 819.419724] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 819.422302] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.422511] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.422659] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.422842] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.423018] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.423163] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.423294] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 822.689635] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquiring lock "9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.690050] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.194181] env[61978]: DEBUG nova.compute.manager [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 823.572158] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Acquiring lock "96a38ed0-c880-4f21-9389-99f039279072" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.572927] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Lock "96a38ed0-c880-4f21-9389-99f039279072" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.740331] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.744200] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.744200] env[61978]: INFO nova.compute.claims [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 824.078402] env[61978]: DEBUG nova.compute.manager [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 824.610959] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.848945] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9cf1e54-b5d1-4af4-90c1-68ab33fe5f9f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.862728] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e8a86e-7a23-4ee3-b286-08e9eef9005d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.909561] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc9a829e-1912-4c87-92ae-7ae851b52e54 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.919720] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7daac9d6-2f66-4457-905c-6ed6550787bf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.944604] env[61978]: DEBUG nova.compute.provider_tree [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 825.105891] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "892b03e7-a9fc-4b53-bffd-d8b090cbb9ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.106039] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "892b03e7-a9fc-4b53-bffd-d8b090cbb9ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.451354] env[61978]: DEBUG nova.scheduler.client.report [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: 
{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 825.587516] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquiring lock "e9e2deb5-5bf9-4b57-832f-9928d3cda162" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.587853] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Lock "e9e2deb5-5bf9-4b57-832f-9928d3cda162" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.589206] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Acquiring lock "e249c706-3196-4593-ae96-53f2619e0243" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.589426] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Lock "e249c706-3196-4593-ae96-53f2619e0243" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.608621] env[61978]: DEBUG nova.compute.manager [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 825.957767] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.217s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.958430] env[61978]: DEBUG nova.compute.manager [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 825.961823] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.352s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.963326] env[61978]: INFO nova.compute.claims [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 826.094423] env[61978]: DEBUG nova.compute.manager [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 826.102917] env[61978]: DEBUG nova.compute.manager [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 826.145017] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.471221] env[61978]: DEBUG nova.compute.utils [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 826.477128] env[61978]: DEBUG nova.compute.manager [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 826.477128] env[61978]: DEBUG nova.network.neutron [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 826.630299] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.636135] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.774240] env[61978]: DEBUG oslo_concurrency.lockutils [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Acquiring lock "66ee1fd7-40f7-461f-b0c6-5951a58ac660" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.774520] env[61978]: DEBUG oslo_concurrency.lockutils [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Lock "66ee1fd7-40f7-461f-b0c6-5951a58ac660" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.852310] env[61978]: DEBUG nova.policy [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '74b99d5453e243ada5c84c82947dcba7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '00c674bbf1e945ba946d844f9856fdfc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 826.977097] env[61978]: DEBUG nova.compute.manager [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 827.130518] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be946866-c454-44c8-ad36-7695f0fa0122 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.139242] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26616f74-679c-436b-9957-c2938c307c44 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.180163] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3768d3d8-c8bc-4546-8e18-93705ebfecf3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.189536] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c349155-d38d-437a-8dff-40e6ccd8335d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.204404] env[61978]: DEBUG nova.compute.provider_tree [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.277618] env[61978]: DEBUG nova.compute.manager [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 827.462835] env[61978]: DEBUG nova.network.neutron [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Successfully created port: a5290cfd-6d88-4c49-a54c-626d4c4843bd {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 827.709320] env[61978]: DEBUG nova.scheduler.client.report [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 827.809175] env[61978]: DEBUG oslo_concurrency.lockutils [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.995293] env[61978]: DEBUG nova.compute.manager [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 828.135806] env[61978]: DEBUG nova.virt.hardware [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 828.136131] env[61978]: DEBUG nova.virt.hardware [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 828.137018] env[61978]: DEBUG nova.virt.hardware [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 828.137018] env[61978]: DEBUG nova.virt.hardware [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 828.137018] env[61978]: DEBUG nova.virt.hardware [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 828.137018] env[61978]: DEBUG nova.virt.hardware [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 828.137330] env[61978]: DEBUG nova.virt.hardware [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 828.137523] env[61978]: DEBUG nova.virt.hardware [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 828.137824] env[61978]: DEBUG nova.virt.hardware [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 828.138142] env[61978]: DEBUG nova.virt.hardware [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 828.138409] env[61978]: DEBUG nova.virt.hardware [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 828.139341] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367dd0a2-304e-480a-b8e8-8359bda1d5b7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.148841] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ed9e6a-3ae8-4949-9580-368e9bf516d9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.168144] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9509d086-dd3b-4988-93b4-29eaf1da968d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.216647] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.254s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.217375] env[61978]: DEBUG nova.compute.manager [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 828.221122] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.076s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.222544] env[61978]: INFO nova.compute.claims [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 828.555120] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Acquiring lock "5d48e854-45fd-4767-91b7-100f84bdca55" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.555342] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Lock "5d48e854-45fd-4767-91b7-100f84bdca55" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.731444] env[61978]: DEBUG nova.compute.utils [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 828.737106] env[61978]: DEBUG nova.compute.manager [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Not allocating networking since 'none' was specified. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 829.061052] env[61978]: DEBUG nova.compute.manager [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 829.240210] env[61978]: DEBUG nova.compute.manager [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 829.476577] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef64ecc-0673-4681-84fa-b68ddb105d18 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.487582] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae3310c-0e8c-4b9d-b59b-0934dcc30f94 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.526772] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e3b7feb-2ca4-4773-a6ea-8f80a0b510c8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.536138] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2efbf2b-0b8e-42b7-84a8-c6d31a5c68a9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.550103] env[61978]: DEBUG nova.compute.provider_tree [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.587429] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.817702] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "b26a4784-698d-477a-8db7-58156899d231" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.817702] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "b26a4784-698d-477a-8db7-58156899d231" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.913026] env[61978]: DEBUG nova.network.neutron [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Successfully updated port: a5290cfd-6d88-4c49-a54c-626d4c4843bd {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 830.057935] env[61978]: DEBUG nova.scheduler.client.report [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Inventory has not changed for provider 
44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 830.253561] env[61978]: DEBUG nova.compute.manager [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 830.285777] env[61978]: DEBUG nova.virt.hardware [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 830.286072] env[61978]: DEBUG nova.virt.hardware [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 830.286204] env[61978]: DEBUG nova.virt.hardware [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 830.286384] env[61978]: DEBUG nova.virt.hardware [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 830.286529] env[61978]: DEBUG nova.virt.hardware [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 830.287273] env[61978]: DEBUG nova.virt.hardware [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 830.287273] env[61978]: DEBUG 
nova.virt.hardware [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 830.287273] env[61978]: DEBUG nova.virt.hardware [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 830.287420] env[61978]: DEBUG nova.virt.hardware [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 830.287866] env[61978]: DEBUG nova.virt.hardware [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 830.288076] env[61978]: DEBUG nova.virt.hardware [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 830.289081] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-334a920d-b828-4e5f-a6c7-28088d07c23d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.306014] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-040f7b77-f3a8-4e04-a98a-2a3910710957 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.318591] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Instance VIF info [] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 830.329769] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 830.330207] env[61978]: DEBUG nova.compute.manager [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 830.333841] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ccb23fc-6b9b-454d-aa0c-6cfff4b1c842 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.351029] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Created folder: OpenStack in parent group-v4. [ 830.351228] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Creating folder: Project (336e650716c14e45b1b7d32edd36081c). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 830.351469] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12384fe6-2146-44d4-901f-7884740cb0f2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.364195] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Created folder: Project (336e650716c14e45b1b7d32edd36081c) in parent group-v295764. [ 830.364195] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Creating folder: Instances. Parent ref: group-v295765. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 830.364659] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b5a8b110-7fd6-49b4-af22-b89167ddac59 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.375141] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Created folder: Instances in parent group-v295765. [ 830.375565] env[61978]: DEBUG oslo.service.loopingcall [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 830.375609] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 830.376504] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-db78b5ad-c0fd-440c-ba8d-e9a5bdac5237 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.392697] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 830.392697] env[61978]: value = "task-1394523" [ 830.392697] env[61978]: _type = "Task" [ 830.392697] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.401134] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394523, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.416999] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquiring lock "refresh_cache-9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.416999] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquired lock "refresh_cache-9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.416999] env[61978]: DEBUG nova.network.neutron [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 830.562278] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.342s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.562814] env[61978]: DEBUG nova.compute.manager [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 830.567715] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.937s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.568033] env[61978]: INFO nova.compute.claims [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 830.906193] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394523, 'name': CreateVM_Task, 'duration_secs': 0.357178} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.906575] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 830.908143] env[61978]: DEBUG oslo_vmware.service [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f31fca-ed9f-4e04-8c37-9d254d585e55 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.915208] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.915680] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.916661] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 830.917064] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38cbd2be-35c6-4f77-b0d5-ad6ad8c7dea2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.926040] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 830.926040] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e55bbd-7352-a9b8-ce08-c76716cebf84" [ 830.926040] env[61978]: _type = "Task" [ 830.926040] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.938098] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e55bbd-7352-a9b8-ce08-c76716cebf84, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.943178] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.018553] env[61978]: DEBUG nova.network.neutron [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 831.072675] env[61978]: DEBUG nova.compute.utils [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 831.074078] env[61978]: DEBUG nova.compute.manager [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 831.074487] env[61978]: DEBUG nova.network.neutron [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 831.133416] env[61978]: DEBUG nova.policy [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8f50bac42274555ab08e047cdb028ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43ebac7c44604f55b94cbc06648f4908', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 831.247198] env[61978]: DEBUG oslo_concurrency.lockutils [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "92eb5edb-803b-48d4-8c4f-338d7c3b3d13" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.247502] env[61978]: DEBUG oslo_concurrency.lockutils [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "92eb5edb-803b-48d4-8c4f-338d7c3b3d13" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.311877] env[61978]: DEBUG nova.network.neutron [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Updating instance_info_cache with network_info: [{"id": "a5290cfd-6d88-4c49-a54c-626d4c4843bd", "address": "fa:16:3e:ed:43:9e", "network": {"id": "4b7af29e-cc4a-4765-8d05-6adf88573ad3", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1006801081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00c674bbf1e945ba946d844f9856fdfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5290cfd-6d", "ovs_interfaceid": "a5290cfd-6d88-4c49-a54c-626d4c4843bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.439100] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.439643] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 831.439948] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.440140] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.440565] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Creating 
directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 831.440847] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-541c2097-c4b0-41e0-b7f3-21114e458110 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.459821] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 831.460111] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 831.463088] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d93e7036-567c-48a0-9228-5582c6af9084 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.472204] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edac27b5-a42d-4f6f-be7b-9f9fd60d7f36 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.477675] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 831.477675] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c22825-b2e2-ce88-d572-2d9c34dc3077" [ 831.477675] env[61978]: _type = "Task" [ 831.477675] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.488128] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c22825-b2e2-ce88-d572-2d9c34dc3077, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.586014] env[61978]: DEBUG nova.compute.manager [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 831.591675] env[61978]: DEBUG nova.network.neutron [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Successfully created port: 3f1e490b-cbe2-45bd-9ad3-dc8c1b0acd33 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 831.750656] env[61978]: DEBUG nova.compute.manager [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 831.816564] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Releasing lock "refresh_cache-9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.816564] env[61978]: DEBUG nova.compute.manager [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Instance network_info: |[{"id": "a5290cfd-6d88-4c49-a54c-626d4c4843bd", "address": "fa:16:3e:ed:43:9e", "network": {"id": "4b7af29e-cc4a-4765-8d05-6adf88573ad3", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1006801081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00c674bbf1e945ba946d844f9856fdfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5290cfd-6d", "ovs_interfaceid": "a5290cfd-6d88-4c49-a54c-626d4c4843bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 831.817344] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:43:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b67e519-46cf-44ce-b670-4ba4c0c5b658', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a5290cfd-6d88-4c49-a54c-626d4c4843bd', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 831.831740] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Creating folder: Project (00c674bbf1e945ba946d844f9856fdfc). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 831.837652] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dade1dfe-86a0-420d-a446-f527cd969ff2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.852285] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Created folder: Project (00c674bbf1e945ba946d844f9856fdfc) in parent group-v295764. [ 831.852545] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Creating folder: Instances. Parent ref: group-v295768. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 831.852954] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d599c3f-83b4-40ed-95a5-7990ef650f41 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.864481] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Created folder: Instances in parent group-v295768. [ 831.865847] env[61978]: DEBUG oslo.service.loopingcall [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 831.866119] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 831.866338] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5923b42b-2fd8-4263-9f07-81babb2ea7be {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.886807] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d3466b0-5974-4ba6-8f95-2574399a8ad9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.898322] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 831.898322] env[61978]: value = "task-1394526" [ 831.898322] env[61978]: _type = "Task" [ 831.898322] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.909337] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67aa88f-a8eb-44f6-83c5-5dbfb10ed76e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.915925] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394526, 'name': CreateVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.943880] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1148dc5-68ff-4acd-91d0-455d568d7fa9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.953021] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5017eeb9-d38b-443d-a0e7-ec38f6b0c2ef {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.968061] env[61978]: DEBUG nova.compute.provider_tree [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.989178] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Preparing fetch location {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 831.990588] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Creating directory with path [datastore2] vmware_temp/1956c11e-ce3a-47a0-8c2b-5e5f9b3183ee/4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 831.990588] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-74836170-ab73-4ae9-97f8-390c1aae1087 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.003042] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Created directory with path [datastore2] vmware_temp/1956c11e-ce3a-47a0-8c2b-5e5f9b3183ee/4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 832.003645] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Fetch image to [datastore2] vmware_temp/1956c11e-ce3a-47a0-8c2b-5e5f9b3183ee/4732143d-796a-4a66-9f1e-806f8b0654e0/tmp-sparse.vmdk {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 832.003645] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 
tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Downloading image file data 4732143d-796a-4a66-9f1e-806f8b0654e0 to [datastore2] vmware_temp/1956c11e-ce3a-47a0-8c2b-5e5f9b3183ee/4732143d-796a-4a66-9f1e-806f8b0654e0/tmp-sparse.vmdk on the data store datastore2 {{(pid=61978) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 832.004516] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea8be7f8-c03d-4ea8-844e-d1a4be753037 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.013012] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc15ae9-e278-4254-8070-859ff737c2dc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.023744] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b26763e-387a-4bf2-8099-631fa59976d1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.056635] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-648a883e-e38d-4994-bb7f-a96a9a6bbaf1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.065516] env[61978]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6aa1a299-e134-43c4-9602-5dae6f8ec435 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.091022] env[61978]: DEBUG nova.compute.manager [req-8d21c914-bbb1-42d4-a038-9d87a65aa6f2 req-13b9e700-b1e4-4bde-93a3-2a0fc384d75a service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Received event network-vif-plugged-a5290cfd-6d88-4c49-a54c-626d4c4843bd {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 832.091258] env[61978]: DEBUG oslo_concurrency.lockutils [req-8d21c914-bbb1-42d4-a038-9d87a65aa6f2 req-13b9e700-b1e4-4bde-93a3-2a0fc384d75a service nova] Acquiring lock "9b6b4da7-4f86-46bc-a75f-fc5e1126c53b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.091796] env[61978]: DEBUG oslo_concurrency.lockutils [req-8d21c914-bbb1-42d4-a038-9d87a65aa6f2 req-13b9e700-b1e4-4bde-93a3-2a0fc384d75a service nova] Lock "9b6b4da7-4f86-46bc-a75f-fc5e1126c53b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.092010] env[61978]: DEBUG oslo_concurrency.lockutils [req-8d21c914-bbb1-42d4-a038-9d87a65aa6f2 req-13b9e700-b1e4-4bde-93a3-2a0fc384d75a service nova] Lock "9b6b4da7-4f86-46bc-a75f-fc5e1126c53b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.092298] env[61978]: DEBUG nova.compute.manager [req-8d21c914-bbb1-42d4-a038-9d87a65aa6f2 req-13b9e700-b1e4-4bde-93a3-2a0fc384d75a service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] No waiting 
events found dispatching network-vif-plugged-a5290cfd-6d88-4c49-a54c-626d4c4843bd {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 832.092467] env[61978]: WARNING nova.compute.manager [req-8d21c914-bbb1-42d4-a038-9d87a65aa6f2 req-13b9e700-b1e4-4bde-93a3-2a0fc384d75a service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Received unexpected event network-vif-plugged-a5290cfd-6d88-4c49-a54c-626d4c4843bd for instance with vm_state building and task_state spawning. [ 832.159805] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Downloading image file data 4732143d-796a-4a66-9f1e-806f8b0654e0 to the data store datastore2 {{(pid=61978) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 832.230836] env[61978]: DEBUG oslo_vmware.rw_handles [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1956c11e-ce3a-47a0-8c2b-5e5f9b3183ee/4732143d-796a-4a66-9f1e-806f8b0654e0/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61978) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 832.319239] env[61978]: DEBUG oslo_concurrency.lockutils [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.414497] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394526, 'name': CreateVM_Task, 'duration_secs': 0.352905} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.416367] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 832.426658] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.427486] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.427970] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 832.429127] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-beb577c2-1fdb-4350-8b68-309f8bb8967c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.436540] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for the task: (returnval){ [ 832.436540] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52161d83-6acc-da4d-abc6-40dac86ccda3" [ 832.436540] env[61978]: _type = "Task" [ 832.436540] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.446297] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52161d83-6acc-da4d-abc6-40dac86ccda3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.471896] env[61978]: DEBUG nova.scheduler.client.report [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 832.604969] env[61978]: DEBUG nova.compute.manager [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 832.637379] env[61978]: DEBUG nova.virt.hardware [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 832.637777] env[61978]: DEBUG nova.virt.hardware [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 832.637894] env[61978]: DEBUG nova.virt.hardware [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 832.638085] env[61978]: DEBUG nova.virt.hardware [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 832.638237] env[61978]: DEBUG nova.virt.hardware [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 832.638405] env[61978]: DEBUG 
nova.virt.hardware [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 832.638616] env[61978]: DEBUG nova.virt.hardware [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 832.638887] env[61978]: DEBUG nova.virt.hardware [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 832.638937] env[61978]: DEBUG nova.virt.hardware [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 832.639107] env[61978]: DEBUG nova.virt.hardware [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 832.639300] env[61978]: DEBUG nova.virt.hardware [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 832.640268] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c576d6e-7fe2-426c-b95b-1218c57435ed {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.653029] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec64a05-c72d-43d5-a13f-1987fcc94de1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.858286] env[61978]: DEBUG oslo_vmware.rw_handles [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Completed reading data from the image iterator. {{(pid=61978) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 832.859119] env[61978]: DEBUG oslo_vmware.rw_handles [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1956c11e-ce3a-47a0-8c2b-5e5f9b3183ee/4732143d-796a-4a66-9f1e-806f8b0654e0/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 832.947133] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.947422] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 832.947668] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.980340] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.413s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.980634] env[61978]: DEBUG nova.compute.manager [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 832.987043] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.348s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.989098] env[61978]: INFO nova.compute.claims [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 832.996839] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Downloaded image file data 4732143d-796a-4a66-9f1e-806f8b0654e0 to vmware_temp/1956c11e-ce3a-47a0-8c2b-5e5f9b3183ee/4732143d-796a-4a66-9f1e-806f8b0654e0/tmp-sparse.vmdk on the data store datastore2 {{(pid=61978) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 832.998650] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Caching image {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 832.998888] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Copying Virtual Disk [datastore2] vmware_temp/1956c11e-ce3a-47a0-8c2b-5e5f9b3183ee/4732143d-796a-4a66-9f1e-806f8b0654e0/tmp-sparse.vmdk to [datastore2] vmware_temp/1956c11e-ce3a-47a0-8c2b-5e5f9b3183ee/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 832.999167] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9fa90e10-a69d-457f-94f9-dbbd214a65c3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.008650] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 833.008650] env[61978]: value = "task-1394527" [ 833.008650] env[61978]: _type = "Task" [ 833.008650] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.018432] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394527, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.152087] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "243e7146-46fc-43f4-a83b-cdc58f397f9e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.152087] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "243e7146-46fc-43f4-a83b-cdc58f397f9e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.497470] env[61978]: DEBUG nova.compute.utils [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 833.500590] env[61978]: DEBUG nova.compute.manager [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 833.500590] env[61978]: DEBUG nova.network.neutron [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 833.524037] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394527, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.655189] env[61978]: DEBUG nova.compute.manager [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 833.670254] env[61978]: DEBUG nova.policy [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85c9b6562cd44989a9e9250bc0d7fdd4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ed07a0f23094421876c28a10c8adbe8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 833.726979] env[61978]: DEBUG nova.network.neutron [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Successfully updated port: 3f1e490b-cbe2-45bd-9ad3-dc8c1b0acd33 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 833.871838] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "85fc5af8-454d-4042-841a-945b7e84eb6c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.871838] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "85fc5af8-454d-4042-841a-945b7e84eb6c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.007624] env[61978]: DEBUG nova.compute.manager [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 834.034472] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394527, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.679138} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.034472] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Copied Virtual Disk [datastore2] vmware_temp/1956c11e-ce3a-47a0-8c2b-5e5f9b3183ee/4732143d-796a-4a66-9f1e-806f8b0654e0/tmp-sparse.vmdk to [datastore2] vmware_temp/1956c11e-ce3a-47a0-8c2b-5e5f9b3183ee/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 834.034472] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Deleting the datastore file [datastore2] vmware_temp/1956c11e-ce3a-47a0-8c2b-5e5f9b3183ee/4732143d-796a-4a66-9f1e-806f8b0654e0/tmp-sparse.vmdk {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 834.034472] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6fa54959-f2ef-467a-9df3-74ee5654459d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.043138] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 834.043138] env[61978]: value = "task-1394528" [ 834.043138] env[61978]: _type = "Task" [ 834.043138] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.061427] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394528, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.188967] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.197227] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Acquiring lock "2084a365-b662-4564-b899-ab4c4a63f2b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.197310] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Lock "2084a365-b662-4564-b899-ab4c4a63f2b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.229553] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "refresh_cache-892b03e7-a9fc-4b53-bffd-d8b090cbb9ed" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.229644] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "refresh_cache-892b03e7-a9fc-4b53-bffd-d8b090cbb9ed" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.229737] env[61978]: DEBUG nova.network.neutron [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 834.240881] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28b1bc0-3a6b-4833-8185-38b6dfc0e020 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.249497] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3060b295-34ec-4b47-a222-7d2d1b6812c8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.285226] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6fe4b11-a20d-479e-b435-7722fda7774a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.293185] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ecbd274e-6792-4440-9e65-afaa32e0711f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.308506] env[61978]: DEBUG nova.compute.provider_tree [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 834.438553] env[61978]: DEBUG nova.network.neutron [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Successfully created port: a1a8eaeb-61c3-4540-b925-e5516a063dbd {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 834.555338] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394528, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.02315} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.555870] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 834.556159] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Moving file from [datastore2] vmware_temp/1956c11e-ce3a-47a0-8c2b-5e5f9b3183ee/4732143d-796a-4a66-9f1e-806f8b0654e0 to [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0. {{(pid=61978) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 834.556472] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-857467e4-bebb-41d6-8850-a54f60021cf2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.564769] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 834.564769] env[61978]: value = "task-1394529" [ 834.564769] env[61978]: _type = "Task" [ 834.564769] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.575514] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394529, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.813036] env[61978]: DEBUG nova.scheduler.client.report [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 834.827647] env[61978]: DEBUG nova.network.neutron [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 835.030630] env[61978]: DEBUG nova.compute.manager [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 835.055989] env[61978]: DEBUG nova.virt.hardware [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 835.055989] env[61978]: DEBUG nova.virt.hardware [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 835.055989] env[61978]: DEBUG nova.virt.hardware [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 835.056197] env[61978]: DEBUG nova.virt.hardware [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] 
Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 835.056197] env[61978]: DEBUG nova.virt.hardware [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 835.056197] env[61978]: DEBUG nova.virt.hardware [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 835.056197] env[61978]: DEBUG nova.virt.hardware [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 835.056197] env[61978]: DEBUG nova.virt.hardware [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 835.056824] env[61978]: DEBUG nova.virt.hardware [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 835.057193] env[61978]: DEBUG nova.virt.hardware [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 835.057486] env[61978]: DEBUG nova.virt.hardware [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 835.058799] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ddb0ba4-08d3-4a1f-99a5-2051e357c790 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.074219] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-712741f4-ff22-4ecd-8765-b1eae83ce431 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.084038] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394529, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.027636} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.093702] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] File moved {{(pid=61978) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 835.093702] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Cleaning up location [datastore2] vmware_temp/1956c11e-ce3a-47a0-8c2b-5e5f9b3183ee {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 835.093702] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Deleting the datastore file [datastore2] vmware_temp/1956c11e-ce3a-47a0-8c2b-5e5f9b3183ee {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 835.093702] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-91aef48a-bf3c-41f1-a985-f146cc766cc2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.101342] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 835.101342] env[61978]: value = "task-1394530" [ 835.101342] env[61978]: _type = "Task" [ 835.101342] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.108394] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394530, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.120260] env[61978]: DEBUG nova.network.neutron [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Updating instance_info_cache with network_info: [{"id": "3f1e490b-cbe2-45bd-9ad3-dc8c1b0acd33", "address": "fa:16:3e:67:ba:f7", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f1e490b-cb", "ovs_interfaceid": "3f1e490b-cbe2-45bd-9ad3-dc8c1b0acd33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.310418] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Acquiring lock "eb7cb200-c162-4e92-8916-6d9abd5cf34d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.310776] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Lock "eb7cb200-c162-4e92-8916-6d9abd5cf34d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.321505] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.338s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.322022] env[61978]: DEBUG nova.compute.manager [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 835.326480] env[61978]: DEBUG oslo_concurrency.lockutils [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.518s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.327872] env[61978]: INFO nova.compute.claims [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 835.335717] env[61978]: DEBUG nova.compute.manager [req-38fde3c1-9a74-404e-a976-4694bcb1e605 req-6a578775-7211-41aa-ac9a-f96df4fa2af9 service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Received event network-changed-a5290cfd-6d88-4c49-a54c-626d4c4843bd {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 835.335856] env[61978]: DEBUG nova.compute.manager [req-38fde3c1-9a74-404e-a976-4694bcb1e605 req-6a578775-7211-41aa-ac9a-f96df4fa2af9 service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Refreshing instance network info cache due to event network-changed-a5290cfd-6d88-4c49-a54c-626d4c4843bd. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 835.336144] env[61978]: DEBUG oslo_concurrency.lockutils [req-38fde3c1-9a74-404e-a976-4694bcb1e605 req-6a578775-7211-41aa-ac9a-f96df4fa2af9 service nova] Acquiring lock "refresh_cache-9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.336232] env[61978]: DEBUG oslo_concurrency.lockutils [req-38fde3c1-9a74-404e-a976-4694bcb1e605 req-6a578775-7211-41aa-ac9a-f96df4fa2af9 service nova] Acquired lock "refresh_cache-9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.336358] env[61978]: DEBUG nova.network.neutron [req-38fde3c1-9a74-404e-a976-4694bcb1e605 req-6a578775-7211-41aa-ac9a-f96df4fa2af9 service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Refreshing network info cache for port a5290cfd-6d88-4c49-a54c-626d4c4843bd {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 835.387027] env[61978]: DEBUG nova.compute.manager [req-a1e7267e-7911-4ecf-99aa-7d714217bc3d req-7ab9675d-f968-4de7-9814-a31f5081fe6c service nova] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Received event network-vif-plugged-3f1e490b-cbe2-45bd-9ad3-dc8c1b0acd33 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 835.387245] env[61978]: DEBUG oslo_concurrency.lockutils [req-a1e7267e-7911-4ecf-99aa-7d714217bc3d req-7ab9675d-f968-4de7-9814-a31f5081fe6c service nova] Acquiring lock "892b03e7-a9fc-4b53-bffd-d8b090cbb9ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.387521] env[61978]: DEBUG oslo_concurrency.lockutils [req-a1e7267e-7911-4ecf-99aa-7d714217bc3d req-7ab9675d-f968-4de7-9814-a31f5081fe6c service nova] Lock "892b03e7-a9fc-4b53-bffd-d8b090cbb9ed-events" 
acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.387758] env[61978]: DEBUG oslo_concurrency.lockutils [req-a1e7267e-7911-4ecf-99aa-7d714217bc3d req-7ab9675d-f968-4de7-9814-a31f5081fe6c service nova] Lock "892b03e7-a9fc-4b53-bffd-d8b090cbb9ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.387913] env[61978]: DEBUG nova.compute.manager [req-a1e7267e-7911-4ecf-99aa-7d714217bc3d req-7ab9675d-f968-4de7-9814-a31f5081fe6c service nova] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] No waiting events found dispatching network-vif-plugged-3f1e490b-cbe2-45bd-9ad3-dc8c1b0acd33 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 835.388836] env[61978]: WARNING nova.compute.manager [req-a1e7267e-7911-4ecf-99aa-7d714217bc3d req-7ab9675d-f968-4de7-9814-a31f5081fe6c service nova] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Received unexpected event network-vif-plugged-3f1e490b-cbe2-45bd-9ad3-dc8c1b0acd33 for instance with vm_state building and task_state spawning. [ 835.613299] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394530, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.023625} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.613299] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 835.613299] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aef18ba5-1d73-468d-94a0-8f03a3f9c9a3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.620013] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 835.620013] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cbb04b-9e7e-4e94-3a09-c24a2e88f1df" [ 835.620013] env[61978]: _type = "Task" [ 835.620013] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.629280] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "refresh_cache-892b03e7-a9fc-4b53-bffd-d8b090cbb9ed" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.629280] env[61978]: DEBUG nova.compute.manager [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Instance network_info: |[{"id": "3f1e490b-cbe2-45bd-9ad3-dc8c1b0acd33", "address": "fa:16:3e:67:ba:f7", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f1e490b-cb", "ovs_interfaceid": "3f1e490b-cbe2-45bd-9ad3-dc8c1b0acd33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 835.629428] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:ba:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ed3ffc1d-9f86-4029-857e-6cd1d383edbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3f1e490b-cbe2-45bd-9ad3-dc8c1b0acd33', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 835.640506] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Creating folder: Project (43ebac7c44604f55b94cbc06648f4908). Parent ref: group-v295764. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 835.641469] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6f1e8438-252f-45c3-82a5-3893665a4c0d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.647632] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cbb04b-9e7e-4e94-3a09-c24a2e88f1df, 'name': SearchDatastore_Task, 'duration_secs': 0.012074} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.648327] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.648594] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 96a38ed0-c880-4f21-9389-99f039279072/96a38ed0-c880-4f21-9389-99f039279072.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 835.649271] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.649508] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 835.649692] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ade9d07-7871-4a46-aa92-dbc6e4feb798 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.652031] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ea6ad927-30ab-45cf-b972-2b3472aafd8d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.657616] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Created folder: Project (43ebac7c44604f55b94cbc06648f4908) in parent group-v295764. 
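The entries above (CopyVirtualDisk_Task, DeleteDatastoreFile_Task, MoveDatastoreFile_Task, and the repeated "Task: {...} progress is N%" polling) all follow oslo.vmware's invoke-then-wait pattern: issue a vCenter task through the API session, then block on wait_for_task until the task reaches a terminal state. Below is a minimal sketch of that pattern; the session constructor arguments and the helper name cache_image_copy are illustrative assumptions and not Nova's actual code path — only invoke_api and wait_for_task mirror the calls visible in this log.

```python
# Illustrative sketch of the invoke-then-poll pattern seen in the log above.
# Assumes oslo.vmware's VMwareAPISession; credentials and argument values are
# placeholders, and cache_image_copy is a hypothetical helper, not Nova code.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc1.example.test', 'user', 'secret',        # placeholder endpoint/creds
    api_retry_count=10, task_poll_interval=0.5)

def cache_image_copy(session, dc_ref, src_path, dst_path):
    """Copy a VMDK on the datastore and block until the vCenter task finishes."""
    vdm = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', vdm,
        sourceName=src_path, sourceDatacenter=dc_ref,
        destName=dst_path, destDatacenter=dc_ref)
    # wait_for_task polls the task (the "progress is N%" lines above) and
    # raises if vCenter reports the task ended in an error state.
    session.wait_for_task(task)
```

In the log this pattern is used to move the downloaded tmp-sparse.vmdk into devstack-image-cache_base and then delete the temporary upload directory, all while the "[datastore2] devstack-image-cache_base/<image-id>.vmdk" lock serializes access to the image cache.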
[ 835.657798] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Creating folder: Instances. Parent ref: group-v295771. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 835.658030] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f24a09ca-8fe0-47ac-bdac-c9e6c1156642 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.662378] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 835.662378] env[61978]: value = "task-1394532" [ 835.662378] env[61978]: _type = "Task" [ 835.662378] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.671794] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394532, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.674653] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Created folder: Instances in parent group-v295771. [ 835.674653] env[61978]: DEBUG oslo.service.loopingcall [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 835.674653] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 835.674653] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 835.675169] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 835.675363] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f45abb1-3792-46d7-98aa-6e5e8291fcb5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.677619] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-177f3305-0542-40a1-9666-9e1a0f58d4b0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.700456] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for the task: (returnval){ [ 835.700456] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52841a5e-2346-2f9a-947f-99709f4d9dc7" [ 835.700456] env[61978]: _type = "Task" [ 835.700456] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.701760] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 835.701760] env[61978]: value = "task-1394534" [ 835.701760] env[61978]: _type = "Task" [ 835.701760] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.712624] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52841a5e-2346-2f9a-947f-99709f4d9dc7, 'name': SearchDatastore_Task, 'duration_secs': 0.009738} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.716452] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394534, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.716452] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e6f6064-d900-47ef-a5b9-c3a969559cfb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.721730] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for the task: (returnval){ [ 835.721730] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5212e623-ae63-b7d9-0172-c06cbe1353fd" [ 835.721730] env[61978]: _type = "Task" [ 835.721730] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.732184] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5212e623-ae63-b7d9-0172-c06cbe1353fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.836683] env[61978]: DEBUG nova.compute.utils [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 835.838288] env[61978]: DEBUG nova.compute.manager [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 835.838942] env[61978]: DEBUG nova.network.neutron [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 836.026604] env[61978]: DEBUG nova.policy [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e0391af85174f0393c20bb2f49738f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6337204785bd4d7d98711964c1823f52', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 836.176374] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394532, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.214043] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394534, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.231715] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5212e623-ae63-b7d9-0172-c06cbe1353fd, 'name': SearchDatastore_Task, 'duration_secs': 0.010651} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.232622] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.232622] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b/9b6b4da7-4f86-46bc-a75f-fc5e1126c53b.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 836.232622] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53292c4e-656c-4609-b824-df31fced583b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.240781] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for the task: (returnval){ [ 836.240781] env[61978]: value = "task-1394535" [ 836.240781] env[61978]: _type = "Task" [ 836.240781] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.250170] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394535, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.344500] env[61978]: DEBUG nova.compute.manager [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 836.439306] env[61978]: DEBUG nova.network.neutron [req-38fde3c1-9a74-404e-a976-4694bcb1e605 req-6a578775-7211-41aa-ac9a-f96df4fa2af9 service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Updated VIF entry in instance network info cache for port a5290cfd-6d88-4c49-a54c-626d4c4843bd. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 836.439581] env[61978]: DEBUG nova.network.neutron [req-38fde3c1-9a74-404e-a976-4694bcb1e605 req-6a578775-7211-41aa-ac9a-f96df4fa2af9 service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Updating instance_info_cache with network_info: [{"id": "a5290cfd-6d88-4c49-a54c-626d4c4843bd", "address": "fa:16:3e:ed:43:9e", "network": {"id": "4b7af29e-cc4a-4765-8d05-6adf88573ad3", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1006801081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00c674bbf1e945ba946d844f9856fdfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5290cfd-6d", "ovs_interfaceid": "a5290cfd-6d88-4c49-a54c-626d4c4843bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.609404] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5463a0f-51fb-45e7-a5b3-415014bf5a86 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.618735] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab393b7e-39c2-48ed-95e9-0736f5e04f2a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.660318] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46faaf4c-d8b3-49b7-b982-73be09508560 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.672486] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74deaa21-a2ce-4e34-b8f8-0888d2bd180d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.684197] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394532, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.560396} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.685196] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 96a38ed0-c880-4f21-9389-99f039279072/96a38ed0-c880-4f21-9389-99f039279072.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 836.685381] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 836.685822] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c11e0d28-1063-4bf0-bac1-bcd94843e833 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.695579] env[61978]: DEBUG nova.compute.provider_tree [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.706088] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 836.706088] env[61978]: value = "task-1394536" [ 836.706088] env[61978]: _type = "Task" [ 836.706088] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.717922] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394536, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.721125] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394534, 'name': CreateVM_Task, 'duration_secs': 0.599327} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.721652] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 836.725058] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.725058] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.725058] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 836.725058] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd1eb1da-1c39-49f5-a8a8-ce974ac929a3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.728344] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 836.728344] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]520e3de4-01ec-9424-b4ff-ef0ccbbc8d8e" [ 836.728344] env[61978]: _type = "Task" [ 836.728344] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.737396] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520e3de4-01ec-9424-b4ff-ef0ccbbc8d8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.750136] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394535, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.44763} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.750136] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b/9b6b4da7-4f86-46bc-a75f-fc5e1126c53b.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 836.750136] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 836.750401] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cdbe66ac-fe7a-4b12-a7ed-921ae23e4c21 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.757897] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for the task: (returnval){ [ 836.757897] env[61978]: value = "task-1394537" [ 836.757897] env[61978]: _type = "Task" [ 836.757897] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.770845] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394537, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.773049] env[61978]: DEBUG nova.network.neutron [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Successfully updated port: a1a8eaeb-61c3-4540-b925-e5516a063dbd {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 836.945751] env[61978]: DEBUG oslo_concurrency.lockutils [req-38fde3c1-9a74-404e-a976-4694bcb1e605 req-6a578775-7211-41aa-ac9a-f96df4fa2af9 service nova] Releasing lock "refresh_cache-9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.064998] env[61978]: DEBUG nova.network.neutron [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Successfully created port: 2bb74ad2-1c4e-4d05-ab88-06e859b1a378 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 837.200319] env[61978]: DEBUG nova.scheduler.client.report [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 837.221088] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394536, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068269} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.221358] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 837.222259] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896226b8-b2d3-41a7-bb76-dc113f41cfb5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.246555] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] 96a38ed0-c880-4f21-9389-99f039279072/96a38ed0-c880-4f21-9389-99f039279072.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 837.250224] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43a9de07-2633-4b01-8632-6621e6266f5b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.274815] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520e3de4-01ec-9424-b4ff-ef0ccbbc8d8e, 'name': SearchDatastore_Task, 'duration_secs': 0.008677} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.276801] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.277060] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 837.277358] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.277475] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.277648] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 837.278179] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquiring lock "refresh_cache-e9e2deb5-5bf9-4b57-832f-9928d3cda162" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.278286] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquired lock "refresh_cache-e9e2deb5-5bf9-4b57-832f-9928d3cda162" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.278421] env[61978]: DEBUG nova.network.neutron [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 837.280561] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 837.280561] env[61978]: value = "task-1394538" [ 
837.280561] env[61978]: _type = "Task" [ 837.280561] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.284080] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2041ebdc-1230-4892-a364-015a999660f7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.286975] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394537, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071371} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.292730] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 837.292730] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ea94c1-5b06-490f-8258-e67495ef8850 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.298980] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 837.298980] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 837.312852] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9736c885-aa84-4c45-9f25-ca377e58f0fd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.316216] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394538, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.325775] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b/9b6b4da7-4f86-46bc-a75f-fc5e1126c53b.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 837.326483] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30f50b40-68b3-491c-bb74-28b9381e1482 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.346853] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 837.346853] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b11441-26c9-9e29-7a85-c07a72f50039" [ 837.346853] env[61978]: _type = "Task" [ 837.346853] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.348324] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for the task: (returnval){ [ 837.348324] env[61978]: value = "task-1394539" [ 837.348324] env[61978]: _type = "Task" [ 837.348324] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.352529] env[61978]: DEBUG nova.compute.manager [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 837.363231] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394539, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.366078] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b11441-26c9-9e29-7a85-c07a72f50039, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.385130] env[61978]: DEBUG nova.virt.hardware [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 837.385511] env[61978]: DEBUG nova.virt.hardware [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 837.385563] env[61978]: DEBUG nova.virt.hardware [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 837.386805] env[61978]: DEBUG nova.virt.hardware [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 837.386805] env[61978]: DEBUG nova.virt.hardware [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 837.386805] env[61978]: DEBUG nova.virt.hardware [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 837.386805] env[61978]: DEBUG nova.virt.hardware [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 837.386805] env[61978]: DEBUG nova.virt.hardware [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 837.386966] env[61978]: DEBUG nova.virt.hardware [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 837.386966] env[61978]: DEBUG nova.virt.hardware [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 837.386966] env[61978]: DEBUG nova.virt.hardware [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 837.388510] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788af30b-34aa-4b78-b4f1-a1d5259848d1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.399429] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906ab253-9bb8-4e63-ab51-a1d045d205f5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.708710] env[61978]: DEBUG oslo_concurrency.lockutils [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.382s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.709258] env[61978]: DEBUG nova.compute.manager [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 837.711845] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.125s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.713705] env[61978]: INFO nova.compute.claims [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 837.803345] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394538, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.844838] env[61978]: DEBUG nova.network.neutron [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 837.878555] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b11441-26c9-9e29-7a85-c07a72f50039, 'name': SearchDatastore_Task, 'duration_secs': 0.023098} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.878802] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394539, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.882033] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9c57a67-8f2b-4f60-8e6a-3786ee1721fc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.887695] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 837.887695] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5248ffe2-fa10-eb2a-c749-caabe0a9aa2c" [ 837.887695] env[61978]: _type = "Task" [ 837.887695] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.895187] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5248ffe2-fa10-eb2a-c749-caabe0a9aa2c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.189170] env[61978]: DEBUG nova.network.neutron [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Updating instance_info_cache with network_info: [{"id": "a1a8eaeb-61c3-4540-b925-e5516a063dbd", "address": "fa:16:3e:01:ac:60", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.155", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1a8eaeb-61", "ovs_interfaceid": "a1a8eaeb-61c3-4540-b925-e5516a063dbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.220717] env[61978]: DEBUG nova.compute.utils [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 838.223934] env[61978]: DEBUG nova.compute.manager [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 838.224495] env[61978]: DEBUG nova.network.neutron [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 838.301423] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394538, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.366505] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394539, 'name': ReconfigVM_Task, 'duration_secs': 0.630425} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.366720] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Reconfigured VM instance instance-00000001 to attach disk [datastore2] 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b/9b6b4da7-4f86-46bc-a75f-fc5e1126c53b.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 838.372142] env[61978]: DEBUG nova.policy [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e19064a7a4ff4462acc859f22770b389', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '37fb55a44b11490f8d935d8f98a80909', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 838.374462] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-25ae9c18-e59b-4828-a84c-293d59c8b61a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.382020] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for the task: (returnval){ [ 838.382020] env[61978]: value = "task-1394540" [ 838.382020] env[61978]: _type = "Task" [ 838.382020] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.393857] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394540, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.399738] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5248ffe2-fa10-eb2a-c749-caabe0a9aa2c, 'name': SearchDatastore_Task, 'duration_secs': 0.016399} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.399997] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.400268] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed/892b03e7-a9fc-4b53-bffd-d8b090cbb9ed.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 838.400517] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e672999c-5b30-42a3-9c47-95157e2c17a7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.408122] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 838.408122] env[61978]: value = "task-1394541" [ 838.408122] env[61978]: _type = "Task" [ 838.408122] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.418759] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394541, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.593350] env[61978]: DEBUG nova.compute.manager [req-6a3b0aec-01d7-4af6-9827-d5b3c555ae55 req-d8aeedcb-3c57-41a5-bd8e-b0a9cd03df6d service nova] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Received event network-vif-plugged-a1a8eaeb-61c3-4540-b925-e5516a063dbd {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 838.593519] env[61978]: DEBUG oslo_concurrency.lockutils [req-6a3b0aec-01d7-4af6-9827-d5b3c555ae55 req-d8aeedcb-3c57-41a5-bd8e-b0a9cd03df6d service nova] Acquiring lock "e9e2deb5-5bf9-4b57-832f-9928d3cda162-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.593799] env[61978]: DEBUG oslo_concurrency.lockutils [req-6a3b0aec-01d7-4af6-9827-d5b3c555ae55 req-d8aeedcb-3c57-41a5-bd8e-b0a9cd03df6d service nova] Lock "e9e2deb5-5bf9-4b57-832f-9928d3cda162-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.594033] env[61978]: DEBUG oslo_concurrency.lockutils [req-6a3b0aec-01d7-4af6-9827-d5b3c555ae55 req-d8aeedcb-3c57-41a5-bd8e-b0a9cd03df6d service nova] Lock "e9e2deb5-5bf9-4b57-832f-9928d3cda162-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.594217] env[61978]: DEBUG nova.compute.manager [req-6a3b0aec-01d7-4af6-9827-d5b3c555ae55 req-d8aeedcb-3c57-41a5-bd8e-b0a9cd03df6d service nova] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] No waiting events found dispatching network-vif-plugged-a1a8eaeb-61c3-4540-b925-e5516a063dbd {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 838.594522] env[61978]: WARNING nova.compute.manager [req-6a3b0aec-01d7-4af6-9827-d5b3c555ae55 req-d8aeedcb-3c57-41a5-bd8e-b0a9cd03df6d service nova] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Received unexpected event network-vif-plugged-a1a8eaeb-61c3-4540-b925-e5516a063dbd for instance with vm_state building and task_state spawning. [ 838.594718] env[61978]: DEBUG nova.compute.manager [req-6a3b0aec-01d7-4af6-9827-d5b3c555ae55 req-d8aeedcb-3c57-41a5-bd8e-b0a9cd03df6d service nova] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Received event network-changed-a1a8eaeb-61c3-4540-b925-e5516a063dbd {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 838.594944] env[61978]: DEBUG nova.compute.manager [req-6a3b0aec-01d7-4af6-9827-d5b3c555ae55 req-d8aeedcb-3c57-41a5-bd8e-b0a9cd03df6d service nova] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Refreshing instance network info cache due to event network-changed-a1a8eaeb-61c3-4540-b925-e5516a063dbd. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 838.595152] env[61978]: DEBUG oslo_concurrency.lockutils [req-6a3b0aec-01d7-4af6-9827-d5b3c555ae55 req-d8aeedcb-3c57-41a5-bd8e-b0a9cd03df6d service nova] Acquiring lock "refresh_cache-e9e2deb5-5bf9-4b57-832f-9928d3cda162" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.611412] env[61978]: DEBUG nova.compute.manager [req-de044f78-40d4-41b7-8bce-5d9333385603 req-09e1ec61-ab58-4ac2-82be-b400dccfa24d service nova] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Received event network-changed-3f1e490b-cbe2-45bd-9ad3-dc8c1b0acd33 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 838.611602] env[61978]: DEBUG nova.compute.manager [req-de044f78-40d4-41b7-8bce-5d9333385603 req-09e1ec61-ab58-4ac2-82be-b400dccfa24d service nova] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Refreshing instance network info cache due to event network-changed-3f1e490b-cbe2-45bd-9ad3-dc8c1b0acd33. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 838.611830] env[61978]: DEBUG oslo_concurrency.lockutils [req-de044f78-40d4-41b7-8bce-5d9333385603 req-09e1ec61-ab58-4ac2-82be-b400dccfa24d service nova] Acquiring lock "refresh_cache-892b03e7-a9fc-4b53-bffd-d8b090cbb9ed" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.612034] env[61978]: DEBUG oslo_concurrency.lockutils [req-de044f78-40d4-41b7-8bce-5d9333385603 req-09e1ec61-ab58-4ac2-82be-b400dccfa24d service nova] Acquired lock "refresh_cache-892b03e7-a9fc-4b53-bffd-d8b090cbb9ed" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.612206] env[61978]: DEBUG nova.network.neutron [req-de044f78-40d4-41b7-8bce-5d9333385603 req-09e1ec61-ab58-4ac2-82be-b400dccfa24d service nova] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Refreshing network info cache for port 3f1e490b-cbe2-45bd-9ad3-dc8c1b0acd33 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 838.694481] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Releasing lock "refresh_cache-e9e2deb5-5bf9-4b57-832f-9928d3cda162" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.694942] env[61978]: DEBUG nova.compute.manager [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Instance network_info: |[{"id": "a1a8eaeb-61c3-4540-b925-e5516a063dbd", "address": "fa:16:3e:01:ac:60", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.155", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1a8eaeb-61", "ovs_interfaceid": "a1a8eaeb-61c3-4540-b925-e5516a063dbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 838.695441] env[61978]: DEBUG oslo_concurrency.lockutils [req-6a3b0aec-01d7-4af6-9827-d5b3c555ae55 req-d8aeedcb-3c57-41a5-bd8e-b0a9cd03df6d service nova] Acquired lock "refresh_cache-e9e2deb5-5bf9-4b57-832f-9928d3cda162" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.695714] env[61978]: DEBUG nova.network.neutron [req-6a3b0aec-01d7-4af6-9827-d5b3c555ae55 req-d8aeedcb-3c57-41a5-bd8e-b0a9cd03df6d service nova] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Refreshing network info cache for port a1a8eaeb-61c3-4540-b925-e5516a063dbd {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 838.698640] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:ac:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27138a4c-60c9-45fb-bf37-4c2f765315a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a1a8eaeb-61c3-4540-b925-e5516a063dbd', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 838.706096] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Creating folder: Project (4ed07a0f23094421876c28a10c8adbe8). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 838.708328] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0488dc09-4c21-48a6-a32a-33b27661af99 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.720804] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Created folder: Project (4ed07a0f23094421876c28a10c8adbe8) in parent group-v295764. [ 838.720804] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Creating folder: Instances. Parent ref: group-v295774. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 838.720804] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e6b01a0e-1c90-437f-9e5f-87d86424b5e0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.728240] env[61978]: DEBUG nova.compute.manager [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 838.739770] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Created folder: Instances in parent group-v295774. [ 838.740469] env[61978]: DEBUG oslo.service.loopingcall [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 838.740469] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 838.740469] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-378028b8-b999-498f-bc32-3f0780865a60 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.766501] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 838.766501] env[61978]: value = "task-1394544" [ 838.766501] env[61978]: _type = "Task" [ 838.766501] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.782142] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394544, 'name': CreateVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.802614] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394538, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.893479] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394540, 'name': Rename_Task, 'duration_secs': 0.132263} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.893785] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 838.894105] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c68e482d-6b38-4918-bd74-29f8b9286639 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.903427] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for the task: (returnval){ [ 838.903427] env[61978]: value = "task-1394545" [ 838.903427] env[61978]: _type = "Task" [ 838.903427] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.911475] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394545, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.919211] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394541, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.999429] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d3d572-b336-420c-8527-a8f311410f2a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.011303] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f78b5d-aa19-4c4f-bd62-088bf562fa73 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.016567] env[61978]: DEBUG nova.network.neutron [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Successfully created port: 11772575-2be7-44fb-b865-5690433fd23d {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 839.052681] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a4ec48a-6155-4f45-923d-d68d8b9f8dbf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.061235] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7546c857-fde6-42e0-b17a-b7bf1c104c41 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.074974] env[61978]: DEBUG nova.compute.provider_tree [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 839.276452] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394544, 'name': CreateVM_Task, 'duration_secs': 0.448595} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.276643] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 839.277335] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.277570] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.277796] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 839.278070] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d09163cc-b735-40b7-898c-380cd79553ff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.282725] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for the task: (returnval){ [ 839.282725] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5263f3af-c36d-8c02-68af-40a4bb419a67" [ 839.282725] env[61978]: _type = "Task" [ 839.282725] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.292566] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5263f3af-c36d-8c02-68af-40a4bb419a67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.303879] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394538, 'name': ReconfigVM_Task, 'duration_secs': 1.575642} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.303879] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Reconfigured VM instance instance-00000002 to attach disk [datastore2] 96a38ed0-c880-4f21-9389-99f039279072/96a38ed0-c880-4f21-9389-99f039279072.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 839.304065] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-56c125af-659c-4d73-921b-445146707327 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.309928] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 839.309928] env[61978]: value = "task-1394546" [ 839.309928] env[61978]: _type = "Task" [ 839.309928] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.318523] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394546, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.422160] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394545, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.429125] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394541, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512449} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.429521] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed/892b03e7-a9fc-4b53-bffd-d8b090cbb9ed.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 839.429811] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 839.430122] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dfdba121-bfdf-4b2e-ad38-8f0ad2f15345 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.437259] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 839.437259] env[61978]: value = "task-1394547" [ 839.437259] env[61978]: _type = "Task" [ 839.437259] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.446893] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394547, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.488526] env[61978]: DEBUG nova.network.neutron [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Successfully updated port: 2bb74ad2-1c4e-4d05-ab88-06e859b1a378 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 839.601991] env[61978]: ERROR nova.scheduler.client.report [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [req-99597eb4-bbc0-4774-af4f-0d52d801dd86] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 44209228-3464-48ae-bc40-83eccd44b0cf. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-99597eb4-bbc0-4774-af4f-0d52d801dd86"}]} [ 839.627460] env[61978]: DEBUG nova.scheduler.client.report [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Refreshing inventories for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 839.649336] env[61978]: DEBUG nova.scheduler.client.report [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Updating ProviderTree inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 839.649560] env[61978]: DEBUG nova.compute.provider_tree [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 839.669807] env[61978]: DEBUG nova.scheduler.client.report [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Refreshing aggregate associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, aggregates: None {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 839.694283] env[61978]: DEBUG nova.scheduler.client.report [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Refreshing trait associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 839.746790] env[61978]: DEBUG nova.compute.manager [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 839.756076] env[61978]: DEBUG nova.network.neutron [req-de044f78-40d4-41b7-8bce-5d9333385603 req-09e1ec61-ab58-4ac2-82be-b400dccfa24d service nova] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Updated VIF entry in instance network info cache for port 3f1e490b-cbe2-45bd-9ad3-dc8c1b0acd33. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 839.756258] env[61978]: DEBUG nova.network.neutron [req-de044f78-40d4-41b7-8bce-5d9333385603 req-09e1ec61-ab58-4ac2-82be-b400dccfa24d service nova] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Updating instance_info_cache with network_info: [{"id": "3f1e490b-cbe2-45bd-9ad3-dc8c1b0acd33", "address": "fa:16:3e:67:ba:f7", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f1e490b-cb", "ovs_interfaceid": "3f1e490b-cbe2-45bd-9ad3-dc8c1b0acd33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.790198] env[61978]: DEBUG nova.virt.hardware [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 839.790471] env[61978]: DEBUG nova.virt.hardware [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 839.790631] env[61978]: DEBUG nova.virt.hardware [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 
tempest-ServerExternalEventsTest-400644720-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 839.790809] env[61978]: DEBUG nova.virt.hardware [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 839.790949] env[61978]: DEBUG nova.virt.hardware [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 839.791124] env[61978]: DEBUG nova.virt.hardware [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 839.791443] env[61978]: DEBUG nova.virt.hardware [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 839.791620] env[61978]: DEBUG nova.virt.hardware [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 839.791795] env[61978]: DEBUG nova.virt.hardware [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 839.791928] env[61978]: DEBUG nova.virt.hardware [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 839.792111] env[61978]: DEBUG nova.virt.hardware [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 839.795233] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77c49d3-3882-412b-a0c4-063f40b85f50 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.807539] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5263f3af-c36d-8c02-68af-40a4bb419a67, 'name': SearchDatastore_Task, 'duration_secs': 0.010064} 
completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.811199] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6557d003-7a76-45f0-bb3a-31f66fbb847c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.817271] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.817271] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 839.817271] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.817271] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.817459] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 839.817459] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-496d0ffc-2aa4-4a16-ae93-dcfcec0cbf34 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.841629] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394546, 'name': Rename_Task, 'duration_secs': 0.145521} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.841918] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 839.842029] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 839.843455] env[61978]: DEBUG nova.network.neutron [req-6a3b0aec-01d7-4af6-9827-d5b3c555ae55 req-d8aeedcb-3c57-41a5-bd8e-b0a9cd03df6d service nova] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Updated VIF entry in instance network info cache for port a1a8eaeb-61c3-4540-b925-e5516a063dbd. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 839.843843] env[61978]: DEBUG nova.network.neutron [req-6a3b0aec-01d7-4af6-9827-d5b3c555ae55 req-d8aeedcb-3c57-41a5-bd8e-b0a9cd03df6d service nova] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Updating instance_info_cache with network_info: [{"id": "a1a8eaeb-61c3-4540-b925-e5516a063dbd", "address": "fa:16:3e:01:ac:60", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.155", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1a8eaeb-61", "ovs_interfaceid": "a1a8eaeb-61c3-4540-b925-e5516a063dbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.845883] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 839.846125] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14756d7d-38c3-4169-81fc-077ceefcf171 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.848646] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ede23964-c712-4189-8a54-d43880b412af {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.855813] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for the task: (returnval){ [ 839.855813] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cb8420-8d4a-696c-fbb7-58437db55593" [ 839.855813] env[61978]: _type = "Task" [ 839.855813] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.865816] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 839.865816] env[61978]: value = "task-1394548" [ 839.865816] env[61978]: _type = "Task" [ 839.865816] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.873210] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cb8420-8d4a-696c-fbb7-58437db55593, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.879740] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394548, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.917931] env[61978]: DEBUG oslo_vmware.api [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394545, 'name': PowerOnVM_Task, 'duration_secs': 0.523175} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.918226] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 839.918638] env[61978]: INFO nova.compute.manager [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Took 11.92 seconds to spawn the instance on the hypervisor. 
[ 839.918882] env[61978]: DEBUG nova.compute.manager [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 839.919680] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eabe32f-8c18-4866-8ccf-8d2e66365fe1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.945958] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394547, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070848} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.948530] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 839.949697] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ad6871-8342-47c2-bde8-29b2eb8b80f3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.977151] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed/892b03e7-a9fc-4b53-bffd-d8b090cbb9ed.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 839.980653] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc2eb5da-4767-4a80-b7c3-3b7fc1569b2d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.996885] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Acquiring lock "refresh_cache-e249c706-3196-4593-ae96-53f2619e0243" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.996885] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Acquired lock "refresh_cache-e249c706-3196-4593-ae96-53f2619e0243" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.996885] env[61978]: DEBUG nova.network.neutron [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Building network info cache for instance {{(pid=61978) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 840.002920] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 840.002920] env[61978]: value = "task-1394549" [ 840.002920] env[61978]: _type = "Task" [ 840.002920] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.013933] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394549, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.020410] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a238718-5412-41c7-a3a7-37e8f6b01ac9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.034123] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c51d58-9b6e-4fed-b881-9bcbd735f6bd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.068219] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f1db615-381b-4f16-9c65-ebf44fe8a14c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.075999] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e4802c-3593-4097-8630-4298ae0fe3d8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.089442] env[61978]: DEBUG nova.compute.provider_tree [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 840.261375] env[61978]: DEBUG oslo_concurrency.lockutils [req-de044f78-40d4-41b7-8bce-5d9333385603 req-09e1ec61-ab58-4ac2-82be-b400dccfa24d service nova] Releasing lock "refresh_cache-892b03e7-a9fc-4b53-bffd-d8b090cbb9ed" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.351211] env[61978]: DEBUG oslo_concurrency.lockutils [req-6a3b0aec-01d7-4af6-9827-d5b3c555ae55 req-d8aeedcb-3c57-41a5-bd8e-b0a9cd03df6d service nova] Releasing lock "refresh_cache-e9e2deb5-5bf9-4b57-832f-9928d3cda162" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.372553] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 
tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cb8420-8d4a-696c-fbb7-58437db55593, 'name': SearchDatastore_Task, 'duration_secs': 0.018123} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.377763] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3176d44-5023-43ad-8532-e9295a3ef8ff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.391956] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for the task: (returnval){ [ 840.391956] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5261c637-7eed-25bb-eb21-b20983f0ff22" [ 840.391956] env[61978]: _type = "Task" [ 840.391956] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.392443] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394548, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.400445] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5261c637-7eed-25bb-eb21-b20983f0ff22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.439174] env[61978]: INFO nova.compute.manager [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Took 16.74 seconds to build instance. [ 840.518947] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394549, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.542719] env[61978]: DEBUG nova.network.neutron [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 840.643787] env[61978]: DEBUG nova.scheduler.client.report [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Updated inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf with generation 17 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 840.646023] env[61978]: DEBUG nova.compute.provider_tree [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Updating resource provider 44209228-3464-48ae-bc40-83eccd44b0cf generation from 17 to 18 during operation: update_inventory {{(pid=61978) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 840.646023] env[61978]: DEBUG nova.compute.provider_tree [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 840.723059] env[61978]: DEBUG nova.network.neutron [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Updating instance_info_cache with network_info: [{"id": "2bb74ad2-1c4e-4d05-ab88-06e859b1a378", "address": "fa:16:3e:d3:1c:a5", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.163", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bb74ad2-1c", "ovs_interfaceid": "2bb74ad2-1c4e-4d05-ab88-06e859b1a378", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 840.883759] env[61978]: DEBUG oslo_vmware.api [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394548, 'name': PowerOnVM_Task, 'duration_secs': 0.655296} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.887483] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 840.887483] env[61978]: INFO nova.compute.manager [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Took 10.63 seconds to spawn the instance on the hypervisor. [ 840.887483] env[61978]: DEBUG nova.compute.manager [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 840.887483] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f86c22d-7164-44a6-9cbd-feae9993a62b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.906335] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5261c637-7eed-25bb-eb21-b20983f0ff22, 'name': SearchDatastore_Task, 'duration_secs': 0.016534} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.906669] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.907069] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] e9e2deb5-5bf9-4b57-832f-9928d3cda162/e9e2deb5-5bf9-4b57-832f-9928d3cda162.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 840.909020] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-829bd1c9-9ea6-4b65-90ce-af602251f7d9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.916340] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for the task: (returnval){ [ 840.916340] env[61978]: value = "task-1394550" [ 840.916340] env[61978]: _type = "Task" [ 840.916340] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.928622] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394550, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.940900] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b99d779e-6dcc-44a2-a3ec-ecd6375fdd82 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.251s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.019527] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394549, 'name': ReconfigVM_Task, 'duration_secs': 0.526714} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.020252] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Reconfigured VM instance instance-00000003 to attach disk [datastore2] 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed/892b03e7-a9fc-4b53-bffd-d8b090cbb9ed.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 841.021352] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c8b6d007-c244-441c-955e-b4e139d0e9ce {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.031064] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 841.031064] env[61978]: value = "task-1394551" [ 841.031064] env[61978]: _type = "Task" [ 841.031064] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.042822] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394551, 'name': Rename_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.153940] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.439s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.153940] env[61978]: DEBUG nova.compute.manager [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 841.156156] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.213s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.159013] env[61978]: INFO nova.compute.claims [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 841.227442] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Releasing lock "refresh_cache-e249c706-3196-4593-ae96-53f2619e0243" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.227790] env[61978]: DEBUG nova.compute.manager [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Instance network_info: |[{"id": "2bb74ad2-1c4e-4d05-ab88-06e859b1a378", "address": "fa:16:3e:d3:1c:a5", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.163", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bb74ad2-1c", "ovs_interfaceid": "2bb74ad2-1c4e-4d05-ab88-06e859b1a378", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 841.228594] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:1c:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27138a4c-60c9-45fb-bf37-4c2f765315a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2bb74ad2-1c4e-4d05-ab88-06e859b1a378', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 841.238846] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 
tempest-DeleteServersAdminTestJSON-1900111298-project-member] Creating folder: Project (6337204785bd4d7d98711964c1823f52). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 841.238941] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a70376ec-5b1c-4926-8eeb-04445378f104 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.250556] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Created folder: Project (6337204785bd4d7d98711964c1823f52) in parent group-v295764. [ 841.251096] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Creating folder: Instances. Parent ref: group-v295777. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 841.251414] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d23c66b0-8380-421c-a3b9-21049329400f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.262689] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Created folder: Instances in parent group-v295777. [ 841.262689] env[61978]: DEBUG oslo.service.loopingcall [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 841.262814] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e249c706-3196-4593-ae96-53f2619e0243] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 841.263588] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a5d41263-9596-4fed-a3f2-9d745eaa16b8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.287045] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 841.287045] env[61978]: value = "task-1394554" [ 841.287045] env[61978]: _type = "Task" [ 841.287045] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.296321] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394554, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.320908] env[61978]: DEBUG nova.network.neutron [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Successfully updated port: 11772575-2be7-44fb-b865-5690433fd23d {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 841.407725] env[61978]: INFO nova.compute.manager [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Took 16.82 seconds to build instance. [ 841.428770] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394550, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.447043] env[61978]: DEBUG nova.compute.manager [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 841.540321] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394551, 'name': Rename_Task, 'duration_secs': 0.152594} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.540692] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 841.541085] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aa1398bb-afcd-468f-bbec-a40aff13c2c4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.548029] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 841.548029] env[61978]: value = "task-1394555" [ 841.548029] env[61978]: _type = "Task" [ 841.548029] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.560447] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394555, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.663793] env[61978]: DEBUG nova.compute.utils [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 841.667331] env[61978]: DEBUG nova.compute.manager [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 841.672022] env[61978]: DEBUG nova.network.neutron [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 841.794099] env[61978]: DEBUG nova.policy [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aef84bef612144ab9535bd63435502ce', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a65b19ffda9c4efca057051dc399bd54', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 841.802281] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394554, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.825300] env[61978]: DEBUG oslo_concurrency.lockutils [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Acquiring lock "refresh_cache-66ee1fd7-40f7-461f-b0c6-5951a58ac660" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.825300] env[61978]: DEBUG oslo_concurrency.lockutils [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Acquired lock "refresh_cache-66ee1fd7-40f7-461f-b0c6-5951a58ac660" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.825300] env[61978]: DEBUG nova.network.neutron [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 841.910778] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4dfdca3a-6f3c-4a08-a81b-9d24b1ae589f tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Lock "96a38ed0-c880-4f21-9389-99f039279072" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.337s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.929654] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394550, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.62038} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.930232] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] e9e2deb5-5bf9-4b57-832f-9928d3cda162/e9e2deb5-5bf9-4b57-832f-9928d3cda162.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 841.930634] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 841.931095] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6da8b021-88dd-46cd-a607-cb73ad647d1d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.939863] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for the task: (returnval){ [ 841.939863] env[61978]: value = "task-1394556" [ 841.939863] env[61978]: _type = "Task" [ 841.939863] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.955017] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394556, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.976822] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.059695] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394555, 'name': PowerOnVM_Task} progress is 64%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.175148] env[61978]: DEBUG nova.compute.manager [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 842.258778] env[61978]: DEBUG nova.compute.manager [req-33e1db98-5ce2-47b3-8cf7-9808f279746a req-8636485e-6b53-477c-8103-15f951a24861 service nova] [instance: e249c706-3196-4593-ae96-53f2619e0243] Received event network-vif-plugged-2bb74ad2-1c4e-4d05-ab88-06e859b1a378 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 842.260272] env[61978]: DEBUG oslo_concurrency.lockutils [req-33e1db98-5ce2-47b3-8cf7-9808f279746a req-8636485e-6b53-477c-8103-15f951a24861 service nova] Acquiring lock "e249c706-3196-4593-ae96-53f2619e0243-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.260554] env[61978]: DEBUG oslo_concurrency.lockutils [req-33e1db98-5ce2-47b3-8cf7-9808f279746a req-8636485e-6b53-477c-8103-15f951a24861 service nova] Lock "e249c706-3196-4593-ae96-53f2619e0243-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.260759] env[61978]: DEBUG oslo_concurrency.lockutils [req-33e1db98-5ce2-47b3-8cf7-9808f279746a req-8636485e-6b53-477c-8103-15f951a24861 service nova] Lock "e249c706-3196-4593-ae96-53f2619e0243-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.261054] env[61978]: DEBUG nova.compute.manager [req-33e1db98-5ce2-47b3-8cf7-9808f279746a req-8636485e-6b53-477c-8103-15f951a24861 service nova] [instance: e249c706-3196-4593-ae96-53f2619e0243] No waiting events found dispatching network-vif-plugged-2bb74ad2-1c4e-4d05-ab88-06e859b1a378 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 842.261337] env[61978]: WARNING nova.compute.manager [req-33e1db98-5ce2-47b3-8cf7-9808f279746a req-8636485e-6b53-477c-8103-15f951a24861 service nova] [instance: e249c706-3196-4593-ae96-53f2619e0243] Received unexpected event network-vif-plugged-2bb74ad2-1c4e-4d05-ab88-06e859b1a378 for instance with vm_state building and task_state spawning. [ 842.261884] env[61978]: DEBUG nova.compute.manager [req-33e1db98-5ce2-47b3-8cf7-9808f279746a req-8636485e-6b53-477c-8103-15f951a24861 service nova] [instance: e249c706-3196-4593-ae96-53f2619e0243] Received event network-changed-2bb74ad2-1c4e-4d05-ab88-06e859b1a378 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 842.261884] env[61978]: DEBUG nova.compute.manager [req-33e1db98-5ce2-47b3-8cf7-9808f279746a req-8636485e-6b53-477c-8103-15f951a24861 service nova] [instance: e249c706-3196-4593-ae96-53f2619e0243] Refreshing instance network info cache due to event network-changed-2bb74ad2-1c4e-4d05-ab88-06e859b1a378. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 842.262021] env[61978]: DEBUG oslo_concurrency.lockutils [req-33e1db98-5ce2-47b3-8cf7-9808f279746a req-8636485e-6b53-477c-8103-15f951a24861 service nova] Acquiring lock "refresh_cache-e249c706-3196-4593-ae96-53f2619e0243" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.262359] env[61978]: DEBUG oslo_concurrency.lockutils [req-33e1db98-5ce2-47b3-8cf7-9808f279746a req-8636485e-6b53-477c-8103-15f951a24861 service nova] Acquired lock "refresh_cache-e249c706-3196-4593-ae96-53f2619e0243" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.262359] env[61978]: DEBUG nova.network.neutron [req-33e1db98-5ce2-47b3-8cf7-9808f279746a req-8636485e-6b53-477c-8103-15f951a24861 service nova] [instance: e249c706-3196-4593-ae96-53f2619e0243] Refreshing network info cache for port 2bb74ad2-1c4e-4d05-ab88-06e859b1a378 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 842.304303] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394554, 'name': CreateVM_Task, 'duration_secs': 0.561275} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.307593] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e249c706-3196-4593-ae96-53f2619e0243] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 842.308664] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.308838] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.309257] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 842.309947] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10e6a8cc-0889-47d0-8284-ba9f27ac1266 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.315511] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Waiting for the task: (returnval){ [ 842.315511] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52059d9c-b7b3-fd81-ea04-d5b662a652c7" [ 842.315511] env[61978]: _type = "Task" [ 842.315511] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.330686] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52059d9c-b7b3-fd81-ea04-d5b662a652c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.399381] env[61978]: DEBUG nova.network.neutron [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 842.415357] env[61978]: DEBUG nova.compute.manager [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 842.427556] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d086a48c-8774-4d9f-af97-c7eac6384270 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.447466] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46de37b-1e4d-4561-9aae-444300fe3660 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.457176] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394556, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071091} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.458618] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 842.458618] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaaa8131-fb03-4e5a-84a0-004b1f844e45 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.496598] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eef45cf-576e-44cf-82a0-1aaace187f85 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.520617] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] e9e2deb5-5bf9-4b57-832f-9928d3cda162/e9e2deb5-5bf9-4b57-832f-9928d3cda162.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 842.521787] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4742c7a6-bbb8-41ff-bb13-23f212a1e762 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.540802] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d9c014-0328-4e05-b6a8-d0f286326abf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.546294] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for the task: (returnval){ [ 842.546294] env[61978]: value = "task-1394557" [ 842.546294] env[61978]: _type = "Task" [ 842.546294] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.563060] env[61978]: DEBUG nova.compute.provider_tree [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 842.570983] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394557, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.574860] env[61978]: DEBUG oslo_vmware.api [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394555, 'name': PowerOnVM_Task, 'duration_secs': 0.947703} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.575197] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 842.575462] env[61978]: INFO nova.compute.manager [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Took 9.97 seconds to spawn the instance on the hypervisor. [ 842.575688] env[61978]: DEBUG nova.compute.manager [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 842.576956] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a450847-9635-469d-b824-f41a94db4d7c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.632462] env[61978]: DEBUG nova.network.neutron [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Successfully created port: 85e56a49-da72-4b4e-9fa6-f0112967bcf0 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 842.749990] env[61978]: DEBUG nova.network.neutron [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Updating instance_info_cache with network_info: [{"id": "11772575-2be7-44fb-b865-5690433fd23d", "address": "fa:16:3e:c2:8f:4f", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.148", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11772575-2b", "ovs_interfaceid": "11772575-2be7-44fb-b865-5690433fd23d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.836194] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52059d9c-b7b3-fd81-ea04-d5b662a652c7, 'name': SearchDatastore_Task, 'duration_secs': 0.023148} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.836194] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.836194] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 842.836516] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.836516] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.836787] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 842.837411] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-304f20ec-9f58-4c74-a381-92ef6ae67e70 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.847065] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 842.847065] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 842.850618] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06c1cb49-76b2-4c1d-bebe-0f1f200d5662 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.856709] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Waiting for the task: (returnval){ [ 842.856709] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5263c300-1456-1779-fc6f-967175eda98e" [ 842.856709] env[61978]: _type = "Task" [ 842.856709] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.864766] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5263c300-1456-1779-fc6f-967175eda98e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.942150] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.022384] env[61978]: DEBUG nova.compute.manager [req-3d3cba0e-5393-47d1-9904-8c8c343cd6eb req-6136db89-adaf-4293-81d9-648b0b85d615 service nova] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Received event network-vif-plugged-11772575-2be7-44fb-b865-5690433fd23d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 843.022706] env[61978]: DEBUG oslo_concurrency.lockutils [req-3d3cba0e-5393-47d1-9904-8c8c343cd6eb req-6136db89-adaf-4293-81d9-648b0b85d615 service nova] Acquiring lock "66ee1fd7-40f7-461f-b0c6-5951a58ac660-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.022851] env[61978]: DEBUG oslo_concurrency.lockutils [req-3d3cba0e-5393-47d1-9904-8c8c343cd6eb req-6136db89-adaf-4293-81d9-648b0b85d615 service nova] Lock "66ee1fd7-40f7-461f-b0c6-5951a58ac660-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.022974] env[61978]: DEBUG oslo_concurrency.lockutils [req-3d3cba0e-5393-47d1-9904-8c8c343cd6eb req-6136db89-adaf-4293-81d9-648b0b85d615 service nova] Lock "66ee1fd7-40f7-461f-b0c6-5951a58ac660-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.023175] env[61978]: DEBUG nova.compute.manager [req-3d3cba0e-5393-47d1-9904-8c8c343cd6eb req-6136db89-adaf-4293-81d9-648b0b85d615 service nova] [instance: 
66ee1fd7-40f7-461f-b0c6-5951a58ac660] No waiting events found dispatching network-vif-plugged-11772575-2be7-44fb-b865-5690433fd23d {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 843.023316] env[61978]: WARNING nova.compute.manager [req-3d3cba0e-5393-47d1-9904-8c8c343cd6eb req-6136db89-adaf-4293-81d9-648b0b85d615 service nova] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Received unexpected event network-vif-plugged-11772575-2be7-44fb-b865-5690433fd23d for instance with vm_state building and task_state spawning. [ 843.069688] env[61978]: DEBUG nova.scheduler.client.report [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 843.076480] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394557, 'name': ReconfigVM_Task, 'duration_secs': 0.480496} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.077481] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Reconfigured VM instance instance-00000005 to attach disk [datastore2] e9e2deb5-5bf9-4b57-832f-9928d3cda162/e9e2deb5-5bf9-4b57-832f-9928d3cda162.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 843.078323] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d4d6e42-21a9-4972-8c61-5612d4898149 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.087579] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for the task: (returnval){ [ 843.087579] env[61978]: value = "task-1394558" [ 843.087579] env[61978]: _type = "Task" [ 843.087579] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.106188] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394558, 'name': Rename_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.106188] env[61978]: INFO nova.compute.manager [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Took 16.99 seconds to build instance. [ 843.188391] env[61978]: DEBUG nova.compute.manager [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 843.189785] env[61978]: DEBUG nova.network.neutron [req-33e1db98-5ce2-47b3-8cf7-9808f279746a req-8636485e-6b53-477c-8103-15f951a24861 service nova] [instance: e249c706-3196-4593-ae96-53f2619e0243] Updated VIF entry in instance network info cache for port 2bb74ad2-1c4e-4d05-ab88-06e859b1a378. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 843.190132] env[61978]: DEBUG nova.network.neutron [req-33e1db98-5ce2-47b3-8cf7-9808f279746a req-8636485e-6b53-477c-8103-15f951a24861 service nova] [instance: e249c706-3196-4593-ae96-53f2619e0243] Updating instance_info_cache with network_info: [{"id": "2bb74ad2-1c4e-4d05-ab88-06e859b1a378", "address": "fa:16:3e:d3:1c:a5", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.163", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bb74ad2-1c", "ovs_interfaceid": "2bb74ad2-1c4e-4d05-ab88-06e859b1a378", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.221572] env[61978]: DEBUG nova.virt.hardware [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 843.222019] env[61978]: DEBUG nova.virt.hardware [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 843.222110] env[61978]: DEBUG nova.virt.hardware [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 843.222254] env[61978]: DEBUG nova.virt.hardware [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 843.222405] env[61978]: DEBUG nova.virt.hardware [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 843.222659] env[61978]: DEBUG nova.virt.hardware [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 843.222949] env[61978]: DEBUG nova.virt.hardware [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 843.223076] env[61978]: DEBUG nova.virt.hardware [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 843.223246] env[61978]: DEBUG nova.virt.hardware [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 843.223410] env[61978]: DEBUG nova.virt.hardware [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 843.223582] env[61978]: DEBUG 
nova.virt.hardware [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 843.224747] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc302d9-8eb2-44d4-bc82-c3f55af806d3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.233741] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa0df5e-6cf2-480b-9d38-b12b42e2e605 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.252096] env[61978]: DEBUG oslo_concurrency.lockutils [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Releasing lock "refresh_cache-66ee1fd7-40f7-461f-b0c6-5951a58ac660" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.252422] env[61978]: DEBUG nova.compute.manager [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Instance network_info: |[{"id": "11772575-2be7-44fb-b865-5690433fd23d", "address": "fa:16:3e:c2:8f:4f", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.148", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11772575-2b", "ovs_interfaceid": "11772575-2be7-44fb-b865-5690433fd23d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 843.252964] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:8f:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27138a4c-60c9-45fb-bf37-4c2f765315a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '11772575-2be7-44fb-b865-5690433fd23d', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 843.260163] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 
tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Creating folder: Project (37fb55a44b11490f8d935d8f98a80909). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 843.260455] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a7143f22-0993-41fb-b9a0-adb2f805b9a6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.272160] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Created folder: Project (37fb55a44b11490f8d935d8f98a80909) in parent group-v295764. [ 843.272418] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Creating folder: Instances. Parent ref: group-v295780. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 843.272677] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f31b5d52-a89f-453e-bbe9-78171a28bec5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.281389] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Created folder: Instances in parent group-v295780. [ 843.281641] env[61978]: DEBUG oslo.service.loopingcall [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 843.281838] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 843.282053] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7861bb7f-8791-42db-b2fa-b1510fe49c84 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.302175] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 843.302175] env[61978]: value = "task-1394561" [ 843.302175] env[61978]: _type = "Task" [ 843.302175] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.311558] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394561, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.370694] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5263c300-1456-1779-fc6f-967175eda98e, 'name': SearchDatastore_Task, 'duration_secs': 0.009654} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.370694] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b83648f0-5ae0-44cf-b654-2bdefd2d447f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.375932] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Waiting for the task: (returnval){ [ 843.375932] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52018a9a-8440-26f6-4109-cdb144d1f82c" [ 843.375932] env[61978]: _type = "Task" [ 843.375932] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.386320] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52018a9a-8440-26f6-4109-cdb144d1f82c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.577580] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.421s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.578630] env[61978]: DEBUG nova.compute.manager [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 843.581712] env[61978]: DEBUG oslo_concurrency.lockutils [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.263s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.583574] env[61978]: INFO nova.compute.claims [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 843.598030] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394558, 'name': Rename_Task, 'duration_secs': 0.363363} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.598631] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 843.598631] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de2afb50-71aa-4866-91ca-b0972ba5fca4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.609114] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf569147-5772-41a5-98c7-40134e15d525 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "892b03e7-a9fc-4b53-bffd-d8b090cbb9ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.502s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.609114] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for the task: (returnval){ [ 843.609114] env[61978]: value = "task-1394562" [ 843.609114] env[61978]: _type = "Task" [ 843.609114] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.619403] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394562, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.693715] env[61978]: DEBUG oslo_concurrency.lockutils [req-33e1db98-5ce2-47b3-8cf7-9808f279746a req-8636485e-6b53-477c-8103-15f951a24861 service nova] Releasing lock "refresh_cache-e249c706-3196-4593-ae96-53f2619e0243" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.814949] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394561, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.891518] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52018a9a-8440-26f6-4109-cdb144d1f82c, 'name': SearchDatastore_Task, 'duration_secs': 0.015493} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.892212] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.892446] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] e249c706-3196-4593-ae96-53f2619e0243/e249c706-3196-4593-ae96-53f2619e0243.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 843.892737] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d0689f09-41d7-48de-841f-de9d8f725352 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.902282] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Waiting for the task: (returnval){ [ 843.902282] env[61978]: value = "task-1394563" [ 843.902282] env[61978]: _type = "Task" [ 843.902282] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.912989] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394563, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.088326] env[61978]: DEBUG nova.compute.utils [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 844.092345] env[61978]: DEBUG nova.compute.manager [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 844.092706] env[61978]: DEBUG nova.network.neutron [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 844.116553] env[61978]: DEBUG nova.compute.manager [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 844.129272] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394562, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.250774] env[61978]: DEBUG nova.policy [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a4782614e183484d800b1a9fbc19e51b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '252acdf1eb624fbf91eb9e90c011c038', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 844.318032] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394561, 'name': CreateVM_Task, 'duration_secs': 0.517404} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.318032] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 844.318032] env[61978]: DEBUG oslo_concurrency.lockutils [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.318032] env[61978]: DEBUG oslo_concurrency.lockutils [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.318032] env[61978]: DEBUG oslo_concurrency.lockutils [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 844.318032] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3783011f-a14c-4f26-adf5-90a9d91105b4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.325860] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Waiting for the task: (returnval){ [ 844.325860] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]526a4ac5-f736-9f8c-89c2-4c88ae16083e" [ 844.325860] env[61978]: _type = 
"Task" [ 844.325860] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.337572] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]526a4ac5-f736-9f8c-89c2-4c88ae16083e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.414478] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394563, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.601789] env[61978]: DEBUG nova.compute.manager [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 844.633076] env[61978]: DEBUG oslo_vmware.api [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394562, 'name': PowerOnVM_Task, 'duration_secs': 0.715348} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.633076] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 844.633076] env[61978]: INFO nova.compute.manager [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Took 9.60 seconds to spawn the instance on the hypervisor. 
[ 844.633076] env[61978]: DEBUG nova.compute.manager [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 844.633076] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f458890-f610-40af-af5f-f03ec4ed762e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.663791] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.841793] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]526a4ac5-f736-9f8c-89c2-4c88ae16083e, 'name': SearchDatastore_Task, 'duration_secs': 0.061599} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.842276] env[61978]: DEBUG oslo_concurrency.lockutils [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.842883] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 844.842883] env[61978]: DEBUG oslo_concurrency.lockutils [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.843068] env[61978]: DEBUG oslo_concurrency.lockutils [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.843235] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 844.843423] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-40a5f1df-1a70-4bb5-b22c-65c4ba7853f2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.851632] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 844.851793] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 844.853942] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bdcc632-dbed-4ae5-ae95-2d401711d81f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.860812] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Waiting for the task: (returnval){ [ 844.860812] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52732dd7-e4c1-091e-b93a-243d1e64e679" [ 844.860812] env[61978]: _type = "Task" [ 844.860812] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.868889] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52732dd7-e4c1-091e-b93a-243d1e64e679, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.870512] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e22379a8-0e21-45ec-b722-f95a5b74394c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.876962] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7feb5dc6-227e-4688-9ef9-7e7a4a723229 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.909378] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-535accef-b2e1-4491-95bb-a56cc96e5476 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.916990] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394563, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.875666} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.922048] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] e249c706-3196-4593-ae96-53f2619e0243/e249c706-3196-4593-ae96-53f2619e0243.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 844.922636] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 844.922636] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1a331235-fdcc-4887-a12b-2e1b523eac11 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.925587] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f74b5955-e655-46af-9cd3-14812b284387 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.941342] env[61978]: DEBUG nova.compute.provider_tree [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 844.944863] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Waiting for the task: (returnval){ [ 844.944863] env[61978]: value = "task-1394564" [ 844.944863] env[61978]: _type = "Task" [ 844.944863] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.953580] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394564, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.989129] env[61978]: DEBUG nova.network.neutron [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Successfully updated port: 85e56a49-da72-4b4e-9fa6-f0112967bcf0 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 845.164150] env[61978]: DEBUG nova.network.neutron [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Successfully created port: 3a886f4f-5f7c-4f97-8f00-2555aebe9856 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 845.169718] env[61978]: INFO nova.compute.manager [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Took 18.57 seconds to build instance. [ 845.188539] env[61978]: INFO nova.compute.manager [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Rebuilding instance [ 845.243385] env[61978]: DEBUG nova.compute.manager [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 845.244699] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d253e3-9c40-45da-8095-6383820f75f9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.372627] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52732dd7-e4c1-091e-b93a-243d1e64e679, 'name': SearchDatastore_Task, 'duration_secs': 0.020687} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.373565] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1251a3c-e1e8-418a-ae38-77ae411ed7de {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.378841] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Waiting for the task: (returnval){ [ 845.378841] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]528a7977-087c-d5c8-d254-4032174ed383" [ 845.378841] env[61978]: _type = "Task" [ 845.378841] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.386433] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528a7977-087c-d5c8-d254-4032174ed383, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.447067] env[61978]: DEBUG nova.scheduler.client.report [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 845.463099] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394564, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071374} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.463418] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 845.464309] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b30c15f3-6a0d-431b-a05c-a1ea62053802 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.489694] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] e249c706-3196-4593-ae96-53f2619e0243/e249c706-3196-4593-ae96-53f2619e0243.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 845.490314] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ee3a9d2-9c8e-4a88-b84f-81833747486d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.506378] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Acquiring lock "refresh_cache-5d48e854-45fd-4767-91b7-100f84bdca55" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.506528] env[61978]: 
DEBUG oslo_concurrency.lockutils [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Acquired lock "refresh_cache-5d48e854-45fd-4767-91b7-100f84bdca55" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.506678] env[61978]: DEBUG nova.network.neutron [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 845.513547] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Waiting for the task: (returnval){ [ 845.513547] env[61978]: value = "task-1394565" [ 845.513547] env[61978]: _type = "Task" [ 845.513547] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.523187] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394565, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.613506] env[61978]: DEBUG nova.compute.manager [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 845.659296] env[61978]: DEBUG nova.virt.hardware [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 845.659553] env[61978]: DEBUG nova.virt.hardware [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 845.659727] env[61978]: DEBUG nova.virt.hardware [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 845.659910] env[61978]: DEBUG nova.virt.hardware [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 845.660312] env[61978]: DEBUG nova.virt.hardware [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 845.660579] env[61978]: DEBUG nova.virt.hardware [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 845.660836] env[61978]: DEBUG nova.virt.hardware [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 845.661050] env[61978]: DEBUG nova.virt.hardware [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 845.661257] env[61978]: DEBUG nova.virt.hardware [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 
tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 845.661475] env[61978]: DEBUG nova.virt.hardware [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 845.661979] env[61978]: DEBUG nova.virt.hardware [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 845.662959] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5383dd25-3575-46e6-9c86-f4ba7a8a7485 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.671712] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6169f88-5ee9-4fa5-a85d-d9fab9819b98 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.677258] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8c4ef4fb-455c-4e54-8056-59ae32b16d8e tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Lock "e9e2deb5-5bf9-4b57-832f-9928d3cda162" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.089s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.756855] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 845.757234] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-edca702d-0a40-4f07-a655-16ff8470fc0a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.765337] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 845.765337] env[61978]: value = "task-1394566" [ 845.765337] env[61978]: _type = "Task" [ 845.765337] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.774465] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394566, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.896166] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528a7977-087c-d5c8-d254-4032174ed383, 'name': SearchDatastore_Task, 'duration_secs': 0.038425} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.896166] env[61978]: DEBUG oslo_concurrency.lockutils [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.896166] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 66ee1fd7-40f7-461f-b0c6-5951a58ac660/66ee1fd7-40f7-461f-b0c6-5951a58ac660.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 845.896166] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f64f2b2-4d0e-4f14-8044-d9db425e97a7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.909116] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Waiting for the task: (returnval){ [ 845.909116] env[61978]: value = "task-1394567" [ 845.909116] env[61978]: _type = "Task" [ 845.909116] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.922082] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': task-1394567, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.958792] env[61978]: DEBUG oslo_concurrency.lockutils [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.376s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.959425] env[61978]: DEBUG nova.compute.manager [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 845.962359] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.773s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.964354] env[61978]: INFO nova.compute.claims [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 846.015293] env[61978]: DEBUG oslo_concurrency.lockutils [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquiring lock "371ddf66-a39b-41c4-bbd1-2a1c1b99834e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.018013] env[61978]: DEBUG oslo_concurrency.lockutils [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Lock "371ddf66-a39b-41c4-bbd1-2a1c1b99834e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.033630] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394565, 'name': ReconfigVM_Task, 'duration_secs': 0.489567} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.033758] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Reconfigured VM instance instance-00000004 to attach disk [datastore2] e249c706-3196-4593-ae96-53f2619e0243/e249c706-3196-4593-ae96-53f2619e0243.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 846.034437] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-81d1127c-40d5-4680-9781-62858879373b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.045242] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Waiting for the task: (returnval){ [ 846.045242] env[61978]: value = "task-1394568" [ 846.045242] env[61978]: _type = "Task" [ 846.045242] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.062287] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394568, 'name': Rename_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.100172] env[61978]: DEBUG nova.network.neutron [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 846.208182] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquiring lock "a0762952-2afd-448a-8e46-ba788a4ca131" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.208657] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "a0762952-2afd-448a-8e46-ba788a4ca131" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.283120] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394566, 'name': PowerOffVM_Task, 'duration_secs': 0.186983} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.284041] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 846.284041] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 846.284792] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e0515f9-3773-4266-9beb-325e13d43099 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.293273] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 846.293544] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8e553769-aa18-49f2-a3e6-425bcc0adca6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.323808] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 846.324023] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 846.324223] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Deleting the datastore file [datastore2] 96a38ed0-c880-4f21-9389-99f039279072 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 846.324509] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85c75d99-c703-44a0-909f-780ae121815e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.338874] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 846.338874] env[61978]: value = "task-1394570" [ 846.338874] env[61978]: _type = "Task" [ 846.338874] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.355307] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394570, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.420014] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': task-1394567, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.470366] env[61978]: DEBUG nova.compute.utils [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 846.474936] env[61978]: DEBUG nova.compute.manager [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 846.474936] env[61978]: DEBUG nova.network.neutron [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 846.518779] env[61978]: DEBUG nova.compute.manager [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 846.556021] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394568, 'name': Rename_Task, 'duration_secs': 0.140823} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.556085] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 846.556364] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-16508b9b-b9e1-4a4f-b0ae-eb72f7033f44 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.563029] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Waiting for the task: (returnval){ [ 846.563029] env[61978]: value = "task-1394571" [ 846.563029] env[61978]: _type = "Task" [ 846.563029] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.577633] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394571, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.594655] env[61978]: DEBUG nova.network.neutron [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Updating instance_info_cache with network_info: [{"id": "85e56a49-da72-4b4e-9fa6-f0112967bcf0", "address": "fa:16:3e:0d:7f:c7", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85e56a49-da", "ovs_interfaceid": "85e56a49-da72-4b4e-9fa6-f0112967bcf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.609990] env[61978]: DEBUG nova.policy [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e607dfc944154c1faed12da382640f0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6d7394d965f94155a34dd0ecc0957649', 
'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 846.850000] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394570, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.33692} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.850238] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 846.850491] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 846.850764] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 846.921450] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': task-1394567, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.630365} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.921767] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 66ee1fd7-40f7-461f-b0c6-5951a58ac660/66ee1fd7-40f7-461f-b0c6-5951a58ac660.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 846.923018] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 846.923018] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a1e6ad14-0768-4f98-b3d8-e89dc64c5b08 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.930725] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Waiting for the task: (returnval){ [ 846.930725] env[61978]: value = "task-1394572" [ 846.930725] env[61978]: _type = "Task" [ 846.930725] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.941025] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': task-1394572, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.979455] env[61978]: DEBUG nova.compute.manager [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 847.049741] env[61978]: DEBUG oslo_concurrency.lockutils [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.069600] env[61978]: DEBUG oslo_concurrency.lockutils [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Acquiring lock "081339d7-6d9b-4b66-a816-467d23196c9a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.069840] env[61978]: DEBUG oslo_concurrency.lockutils [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Lock "081339d7-6d9b-4b66-a816-467d23196c9a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.081510] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394571, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.101214] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Releasing lock "refresh_cache-5d48e854-45fd-4767-91b7-100f84bdca55" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.101524] env[61978]: DEBUG nova.compute.manager [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Instance network_info: |[{"id": "85e56a49-da72-4b4e-9fa6-f0112967bcf0", "address": "fa:16:3e:0d:7f:c7", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85e56a49-da", "ovs_interfaceid": "85e56a49-da72-4b4e-9fa6-f0112967bcf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 847.105355] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:7f:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27138a4c-60c9-45fb-bf37-4c2f765315a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '85e56a49-da72-4b4e-9fa6-f0112967bcf0', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 847.112276] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Creating folder: Project (a65b19ffda9c4efca057051dc399bd54). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 847.112788] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d92a559-a095-4702-b22b-a0e5623728ee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.122047] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Created folder: Project (a65b19ffda9c4efca057051dc399bd54) in parent group-v295764. [ 847.122333] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Creating folder: Instances. Parent ref: group-v295783. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 847.122473] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-39af51fa-1cf0-4f90-8547-11208adeb9f7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.132511] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Created folder: Instances in parent group-v295783. [ 847.132511] env[61978]: DEBUG oslo.service.loopingcall [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 847.132511] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 847.132730] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ecc928c8-4387-4680-84a9-0f241d9f6c7d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.154871] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 847.154871] env[61978]: value = "task-1394575" [ 847.154871] env[61978]: _type = "Task" [ 847.154871] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.162528] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394575, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.188036] env[61978]: DEBUG nova.compute.manager [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Received event network-changed-11772575-2be7-44fb-b865-5690433fd23d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 847.188384] env[61978]: DEBUG nova.compute.manager [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Refreshing instance network info cache due to event network-changed-11772575-2be7-44fb-b865-5690433fd23d. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 847.188583] env[61978]: DEBUG oslo_concurrency.lockutils [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] Acquiring lock "refresh_cache-66ee1fd7-40f7-461f-b0c6-5951a58ac660" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.188628] env[61978]: DEBUG oslo_concurrency.lockutils [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] Acquired lock "refresh_cache-66ee1fd7-40f7-461f-b0c6-5951a58ac660" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.188906] env[61978]: DEBUG nova.network.neutron [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Refreshing network info cache for port 11772575-2be7-44fb-b865-5690433fd23d {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 847.304522] env[61978]: DEBUG oslo_concurrency.lockutils [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "892b03e7-a9fc-4b53-bffd-d8b090cbb9ed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.304807] env[61978]: DEBUG oslo_concurrency.lockutils [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock 
"892b03e7-a9fc-4b53-bffd-d8b090cbb9ed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.304986] env[61978]: DEBUG oslo_concurrency.lockutils [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "892b03e7-a9fc-4b53-bffd-d8b090cbb9ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.305186] env[61978]: DEBUG oslo_concurrency.lockutils [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "892b03e7-a9fc-4b53-bffd-d8b090cbb9ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.305356] env[61978]: DEBUG oslo_concurrency.lockutils [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "892b03e7-a9fc-4b53-bffd-d8b090cbb9ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.308442] env[61978]: INFO nova.compute.manager [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Terminating instance [ 847.310746] env[61978]: DEBUG nova.compute.manager [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 847.310746] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 847.311380] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109d7414-9cd9-430d-afa8-fe645a8b3b6a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.321995] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 847.324359] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22bffd70-39dc-4ddd-9591-367da4b49c3e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.332670] env[61978]: DEBUG oslo_vmware.api [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 847.332670] env[61978]: value = "task-1394576" [ 847.332670] env[61978]: _type = "Task" [ 847.332670] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.342608] env[61978]: DEBUG oslo_vmware.api [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394576, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.344971] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e978578-ce00-4eb2-bb60-65a282356a77 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.353080] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d45d420-7204-4768-a6f2-c124b05c01ad {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.393158] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d4b395a-f47c-405c-bad2-8f240aa8a6f4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.402259] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3810067f-e3f6-47f5-b36a-ceaad15e5db4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.419102] env[61978]: DEBUG nova.compute.provider_tree [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 847.443084] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': task-1394572, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074886} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.443084] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 847.444838] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c194530-0830-4e96-938f-c6d165613842 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.467668] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] 66ee1fd7-40f7-461f-b0c6-5951a58ac660/66ee1fd7-40f7-461f-b0c6-5951a58ac660.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 847.467987] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc8d9b70-8799-47b7-b4b5-26b5d73b09cc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.493249] env[61978]: DEBUG nova.network.neutron [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Successfully created port: 15ee1476-11da-4794-a070-c4365a572948 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 847.497365] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Waiting for the task: (returnval){ [ 847.497365] env[61978]: value = "task-1394577" [ 847.497365] env[61978]: _type = "Task" [ 847.497365] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.507634] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': task-1394577, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.579574] env[61978]: DEBUG oslo_vmware.api [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394571, 'name': PowerOnVM_Task, 'duration_secs': 0.528542} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.579845] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 847.580043] env[61978]: INFO nova.compute.manager [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Took 10.23 seconds to spawn the instance on the hypervisor. [ 847.580231] env[61978]: DEBUG nova.compute.manager [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 847.581208] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f27af78-588b-4096-8071-dad139151102 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.663653] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394575, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.851189] env[61978]: DEBUG oslo_vmware.api [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394576, 'name': PowerOffVM_Task, 'duration_secs': 0.339751} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.851310] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 847.851473] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 847.851809] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a799f18-9069-4938-be60-34219f731b74 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.909434] env[61978]: DEBUG nova.virt.hardware [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 847.910232] env[61978]: DEBUG nova.virt.hardware [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 847.910232] env[61978]: DEBUG nova.virt.hardware [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 847.910628] env[61978]: DEBUG nova.virt.hardware [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 847.910741] env[61978]: DEBUG nova.virt.hardware [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 847.910977] env[61978]: DEBUG nova.virt.hardware [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 847.911798] env[61978]: DEBUG nova.virt.hardware [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 847.911798] env[61978]: DEBUG nova.virt.hardware [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 847.911913] env[61978]: DEBUG nova.virt.hardware [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 847.912122] env[61978]: DEBUG nova.virt.hardware [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 847.912915] env[61978]: DEBUG nova.virt.hardware [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 847.914200] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-664d2439-febe-4c43-b342-97314edc6627 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.919995] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 847.920296] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 847.920592] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Deleting the datastore file [datastore2] 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 847.921305] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-21b9815f-2e48-4bf2-a5bc-67c4535b6d0a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.927782] env[61978]: DEBUG 
nova.scheduler.client.report [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 847.935645] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaaa87cd-58b7-49d9-8ce1-97e80d2f727c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.942381] env[61978]: DEBUG oslo_vmware.api [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 847.942381] env[61978]: value = "task-1394579" [ 847.942381] env[61978]: _type = "Task" [ 847.942381] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.961642] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Instance VIF info [] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 847.974206] env[61978]: DEBUG oslo.service.loopingcall [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 847.974206] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 847.974206] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a889a3ff-fcea-4180-8bc6-cd07c0d96b49 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.999184] env[61978]: DEBUG nova.compute.manager [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 848.001347] env[61978]: DEBUG oslo_vmware.api [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394579, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.007235] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 848.007235] env[61978]: value = "task-1394580" [ 848.007235] env[61978]: _type = "Task" [ 848.007235] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.011539] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': task-1394577, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.020074] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394580, 'name': CreateVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.036456] env[61978]: DEBUG nova.virt.hardware [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 848.036706] env[61978]: DEBUG nova.virt.hardware [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 848.036863] env[61978]: DEBUG nova.virt.hardware [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 848.037057] env[61978]: DEBUG nova.virt.hardware [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 848.037250] env[61978]: DEBUG nova.virt.hardware [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 848.037460] env[61978]: DEBUG nova.virt.hardware [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 848.037772] env[61978]: DEBUG nova.virt.hardware [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 848.037993] env[61978]: DEBUG nova.virt.hardware [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 848.038284] env[61978]: DEBUG nova.virt.hardware [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 848.038552] env[61978]: DEBUG nova.virt.hardware [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 848.038827] env[61978]: DEBUG nova.virt.hardware [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 848.040024] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-439bb887-afa3-40ec-8d9e-e1a6db2e994b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.050143] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-601f0385-fa9d-460f-85c5-cb7b59cb2b55 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.112767] env[61978]: INFO nova.compute.manager [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: e249c706-3196-4593-ae96-53f2619e0243] Took 21.51 seconds to build instance. [ 848.174652] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394575, 'name': CreateVM_Task, 'duration_secs': 0.550884} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.174652] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 848.174652] env[61978]: DEBUG oslo_vmware.service [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04955ad4-7e79-4927-af32-1e91040f9645 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.182815] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.182815] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.182970] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 848.183239] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d96eb722-99fd-46af-b9b3-30886b732f8c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.190032] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Waiting for the task: (returnval){ [ 848.190032] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d9b2ef-2a24-b9f6-bdc2-36da9821bdaa" [ 848.190032] env[61978]: _type = "Task" [ 848.190032] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.195933] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d9b2ef-2a24-b9f6-bdc2-36da9821bdaa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.283880] env[61978]: DEBUG nova.network.neutron [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Successfully updated port: 3a886f4f-5f7c-4f97-8f00-2555aebe9856 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 848.445107] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.481s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.446661] env[61978]: DEBUG nova.compute.manager [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 848.450123] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.474s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.451680] env[61978]: INFO nova.compute.claims [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 848.467159] env[61978]: DEBUG oslo_vmware.api [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394579, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.37498} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.467415] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 848.467616] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 848.467796] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 848.467973] env[61978]: INFO nova.compute.manager [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Took 1.16 seconds to destroy the instance on the hypervisor. [ 848.468270] env[61978]: DEBUG oslo.service.loopingcall [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 848.468461] env[61978]: DEBUG nova.compute.manager [-] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 848.468551] env[61978]: DEBUG nova.network.neutron [-] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 848.514790] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': task-1394577, 'name': ReconfigVM_Task, 'duration_secs': 0.546429} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.520701] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Reconfigured VM instance instance-00000006 to attach disk [datastore2] 66ee1fd7-40f7-461f-b0c6-5951a58ac660/66ee1fd7-40f7-461f-b0c6-5951a58ac660.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 848.521260] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4b4d28cd-82a4-4bde-b150-71d87f119529 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.531299] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394580, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.532882] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Waiting for the task: (returnval){ [ 848.532882] env[61978]: value = "task-1394581" [ 848.532882] env[61978]: _type = "Task" [ 848.532882] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.545066] env[61978]: DEBUG nova.network.neutron [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Updated VIF entry in instance network info cache for port 11772575-2be7-44fb-b865-5690433fd23d. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 848.545981] env[61978]: DEBUG nova.network.neutron [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Updating instance_info_cache with network_info: [{"id": "11772575-2be7-44fb-b865-5690433fd23d", "address": "fa:16:3e:c2:8f:4f", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.148", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11772575-2b", "ovs_interfaceid": "11772575-2be7-44fb-b865-5690433fd23d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.550779] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': task-1394581, 'name': Rename_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.623317] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb2555ab-ef9d-437c-aa5d-33434b178bf8 tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Lock "e249c706-3196-4593-ae96-53f2619e0243" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.032s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.708456] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.708724] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 848.708954] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.709107] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.709281] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 848.709871] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29c15b1d-d762-4165-b915-8189c968c14a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.724962] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 848.724962] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 848.726293] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196ddcb7-1fdb-4e5b-9b2e-3261f84ae388 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.737726] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-645b5fce-194e-4289-8a43-d03b8519a479 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.744225] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Waiting for the task: (returnval){ [ 848.744225] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5228f2cb-9416-0ba9-3bfb-e1ec6392dfd1" [ 848.744225] env[61978]: _type = "Task" [ 848.744225] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.759470] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5228f2cb-9416-0ba9-3bfb-e1ec6392dfd1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.790035] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "refresh_cache-b26a4784-698d-477a-8db7-58156899d231" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.790035] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquired lock "refresh_cache-b26a4784-698d-477a-8db7-58156899d231" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.790035] env[61978]: DEBUG nova.network.neutron [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 848.959035] env[61978]: DEBUG nova.compute.utils [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 848.960526] env[61978]: DEBUG nova.compute.manager [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 848.961129] env[61978]: DEBUG nova.network.neutron [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 849.026787] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394580, 'name': CreateVM_Task, 'duration_secs': 0.624456} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.027787] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 849.032524] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.032776] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.034731] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 849.034731] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5705658-d5bd-4779-9911-8988454e0c2b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.046365] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': task-1394581, 'name': Rename_Task, 'duration_secs': 0.32545} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.047896] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 849.050969] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 849.050969] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]528c0bbe-053b-a064-3812-f50ee218c38b" [ 849.050969] env[61978]: _type = "Task" [ 849.050969] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.050969] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27f677c2-0c7e-48ad-81da-df4a5befe35a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.055200] env[61978]: DEBUG oslo_concurrency.lockutils [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] Releasing lock "refresh_cache-66ee1fd7-40f7-461f-b0c6-5951a58ac660" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.055200] env[61978]: DEBUG nova.compute.manager [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Received event network-vif-plugged-85e56a49-da72-4b4e-9fa6-f0112967bcf0 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 849.055200] env[61978]: DEBUG oslo_concurrency.lockutils [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] Acquiring lock "5d48e854-45fd-4767-91b7-100f84bdca55-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.055200] env[61978]: DEBUG oslo_concurrency.lockutils [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] Lock "5d48e854-45fd-4767-91b7-100f84bdca55-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.055200] env[61978]: DEBUG oslo_concurrency.lockutils [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] Lock "5d48e854-45fd-4767-91b7-100f84bdca55-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.055441] env[61978]: DEBUG nova.compute.manager [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] No waiting events found dispatching network-vif-plugged-85e56a49-da72-4b4e-9fa6-f0112967bcf0 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 849.055441] env[61978]: 
WARNING nova.compute.manager [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Received unexpected event network-vif-plugged-85e56a49-da72-4b4e-9fa6-f0112967bcf0 for instance with vm_state building and task_state spawning. [ 849.055441] env[61978]: DEBUG nova.compute.manager [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Received event network-changed-85e56a49-da72-4b4e-9fa6-f0112967bcf0 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 849.055441] env[61978]: DEBUG nova.compute.manager [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Refreshing instance network info cache due to event network-changed-85e56a49-da72-4b4e-9fa6-f0112967bcf0. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 849.055441] env[61978]: DEBUG oslo_concurrency.lockutils [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] Acquiring lock "refresh_cache-5d48e854-45fd-4767-91b7-100f84bdca55" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.055593] env[61978]: DEBUG oslo_concurrency.lockutils [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] Acquired lock "refresh_cache-5d48e854-45fd-4767-91b7-100f84bdca55" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.055593] env[61978]: DEBUG nova.network.neutron [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Refreshing network info cache for port 85e56a49-da72-4b4e-9fa6-f0112967bcf0 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 849.070969] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.071289] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 849.071289] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.071814] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Waiting for the task: (returnval){ [ 849.071814] 
env[61978]: value = "task-1394582" [ 849.071814] env[61978]: _type = "Task" [ 849.071814] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.089540] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': task-1394582, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.110956] env[61978]: DEBUG nova.policy [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ab697d6ab4e4ece8b290afbf5ec1366', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a33ac41ae0247b59c400c6ed9145239', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 849.128877] env[61978]: DEBUG nova.compute.manager [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 849.263568] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Preparing fetch location {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 849.263568] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Creating directory with path [datastore1] vmware_temp/651b6e1b-792b-4588-9c41-fa8dcd04a750/4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 849.263568] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ea8a01d7-793f-4317-8461-c5346e49a05c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.304195] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Created directory with path [datastore1] vmware_temp/651b6e1b-792b-4588-9c41-fa8dcd04a750/4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 849.304560] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Fetch image to [datastore1] vmware_temp/651b6e1b-792b-4588-9c41-fa8dcd04a750/4732143d-796a-4a66-9f1e-806f8b0654e0/tmp-sparse.vmdk {{(pid=61978) 
_fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 849.305361] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Downloading image file data 4732143d-796a-4a66-9f1e-806f8b0654e0 to [datastore1] vmware_temp/651b6e1b-792b-4588-9c41-fa8dcd04a750/4732143d-796a-4a66-9f1e-806f8b0654e0/tmp-sparse.vmdk on the data store datastore1 {{(pid=61978) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 849.306799] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4dba741-e0bd-424f-9154-5dfa62486094 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.322025] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9daf3cff-252d-4a9f-a94b-36983e56ffa5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.340900] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6007c2b2-2986-4541-b898-f433f28f33e8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.387543] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ecb833-fea2-4425-9b9c-8060fffb1b2b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.396022] env[61978]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5473a247-d654-48b0-a2f8-187c26d95f21 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.415315] env[61978]: DEBUG nova.network.neutron [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 849.468760] env[61978]: DEBUG nova.compute.manager [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 849.499141] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Downloading image file data 4732143d-796a-4a66-9f1e-806f8b0654e0 to the data store datastore1 {{(pid=61978) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 849.587425] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': task-1394582, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.591930] env[61978]: DEBUG oslo_vmware.rw_handles [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/651b6e1b-792b-4588-9c41-fa8dcd04a750/4732143d-796a-4a66-9f1e-806f8b0654e0/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61978) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 849.686258] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.866680] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b0170d-adf8-4a2d-836d-2a5565c62dd2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.882870] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d87c6e1a-9265-42cd-b60d-c505d58f4da5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.886793] env[61978]: DEBUG nova.network.neutron [-] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.928584] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50311e40-148e-4eff-994d-06b8e47ccf04 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.942459] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6420a181-1ca4-4c6e-9ca7-b896a02da6c3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.958782] env[61978]: DEBUG nova.compute.provider_tree [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.089132] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': task-1394582, 'name': PowerOnVM_Task} progress is 91%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.103043] env[61978]: DEBUG nova.network.neutron [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Successfully created port: 2daa968c-ac9c-4f15-ad2b-7977f5581ef1 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 850.137313] env[61978]: DEBUG nova.network.neutron [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Updating instance_info_cache with network_info: [{"id": "3a886f4f-5f7c-4f97-8f00-2555aebe9856", "address": "fa:16:3e:94:aa:92", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a886f4f-5f", "ovs_interfaceid": "3a886f4f-5f7c-4f97-8f00-2555aebe9856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.392612] env[61978]: INFO nova.compute.manager [-] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Took 1.92 seconds to deallocate network for instance. [ 850.462544] env[61978]: DEBUG nova.scheduler.client.report [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 850.486181] env[61978]: DEBUG nova.compute.manager [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 850.496987] env[61978]: DEBUG nova.network.neutron [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Updated VIF entry in instance network info cache for port 85e56a49-da72-4b4e-9fa6-f0112967bcf0. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 850.497356] env[61978]: DEBUG nova.network.neutron [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Updating instance_info_cache with network_info: [{"id": "85e56a49-da72-4b4e-9fa6-f0112967bcf0", "address": "fa:16:3e:0d:7f:c7", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85e56a49-da", "ovs_interfaceid": "85e56a49-da72-4b4e-9fa6-f0112967bcf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.505571] env[61978]: DEBUG oslo_vmware.rw_handles [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Completed reading data from the image iterator. {{(pid=61978) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 850.506147] env[61978]: DEBUG oslo_vmware.rw_handles [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/651b6e1b-792b-4588-9c41-fa8dcd04a750/4732143d-796a-4a66-9f1e-806f8b0654e0/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 850.520809] env[61978]: DEBUG nova.virt.hardware [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 850.521209] env[61978]: DEBUG nova.virt.hardware [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 850.521422] env[61978]: DEBUG nova.virt.hardware [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 850.521685] env[61978]: DEBUG nova.virt.hardware [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 850.521842] env[61978]: DEBUG nova.virt.hardware [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 850.522156] env[61978]: DEBUG nova.virt.hardware [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 850.522235] env[61978]: DEBUG nova.virt.hardware [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 850.522358] env[61978]: DEBUG nova.virt.hardware [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 850.522521] env[61978]: DEBUG 
nova.virt.hardware [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 850.522683] env[61978]: DEBUG nova.virt.hardware [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 850.523051] env[61978]: DEBUG nova.virt.hardware [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 850.524057] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e5f3292-c4aa-43e9-8d9a-7c84408d4785 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.533859] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87702acf-d583-44f0-8d25-14c21423d087 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.585239] env[61978]: DEBUG oslo_vmware.api [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': task-1394582, 'name': PowerOnVM_Task, 'duration_secs': 1.057501} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.585508] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 850.585712] env[61978]: INFO nova.compute.manager [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Took 10.84 seconds to spawn the instance on the hypervisor. 
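The entries above trace a vSphere power-on from task creation through wait_for_task polling (progress 0% -> 91% -> completed) to the "Powered on the VM" confirmation. As an illustration only, a minimal sketch of that invoke_api/wait_for_task pattern with the public oslo.vmware API follows; the vCenter host, credentials, and managed-object reference are hypothetical placeholders and are not values taken from this log.

    # Sketch of the PowerOnVM_Task pattern visible above (assumptions: host,
    # credentials and the moref value are placeholders; error handling omitted).
    from oslo_vmware import api
    from oslo_vmware import vim_util

    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)  # hypothetical values

    # Placeholder managed-object reference; in Nova this comes from a
    # PropertyCollector search, not a hard-coded id.
    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)  # polls the task, as the _poll_task entries show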
[ 850.585895] env[61978]: DEBUG nova.compute.manager [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 850.586710] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc7e033-0962-4eb5-b5ae-16a4f4acb9bc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.645189] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Downloaded image file data 4732143d-796a-4a66-9f1e-806f8b0654e0 to vmware_temp/651b6e1b-792b-4588-9c41-fa8dcd04a750/4732143d-796a-4a66-9f1e-806f8b0654e0/tmp-sparse.vmdk on the data store datastore1 {{(pid=61978) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 850.647132] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Caching image {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 850.647390] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Copying Virtual Disk [datastore1] vmware_temp/651b6e1b-792b-4588-9c41-fa8dcd04a750/4732143d-796a-4a66-9f1e-806f8b0654e0/tmp-sparse.vmdk to [datastore1] vmware_temp/651b6e1b-792b-4588-9c41-fa8dcd04a750/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 850.648289] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Releasing lock "refresh_cache-b26a4784-698d-477a-8db7-58156899d231" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.648575] env[61978]: DEBUG nova.compute.manager [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Instance network_info: |[{"id": "3a886f4f-5f7c-4f97-8f00-2555aebe9856", "address": "fa:16:3e:94:aa:92", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", 
"segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a886f4f-5f", "ovs_interfaceid": "3a886f4f-5f7c-4f97-8f00-2555aebe9856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 850.648848] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8749173f-b59b-4b98-8479-b0030ec1797b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.651316] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:aa:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27138a4c-60c9-45fb-bf37-4c2f765315a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3a886f4f-5f7c-4f97-8f00-2555aebe9856', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 850.658901] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Creating folder: Project (252acdf1eb624fbf91eb9e90c011c038). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 850.659866] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2861ea06-3668-4e73-80a3-7f5031c3c7eb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.668802] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Waiting for the task: (returnval){ [ 850.668802] env[61978]: value = "task-1394584" [ 850.668802] env[61978]: _type = "Task" [ 850.668802] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.674259] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Created folder: Project (252acdf1eb624fbf91eb9e90c011c038) in parent group-v295764. [ 850.674454] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Creating folder: Instances. Parent ref: group-v295787. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 850.675242] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31d23a27-588a-4a85-ae6b-0bf2da722d8b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.680869] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394584, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.690776] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Created folder: Instances in parent group-v295787. [ 850.691036] env[61978]: DEBUG oslo.service.loopingcall [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 850.691231] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b26a4784-698d-477a-8db7-58156899d231] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 850.691437] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64c87b69-d146-415b-bbb0-52f4f9ece99a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.712466] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 850.712466] env[61978]: value = "task-1394586" [ 850.712466] env[61978]: _type = "Task" [ 850.712466] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.721617] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394586, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.813935] env[61978]: DEBUG nova.network.neutron [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Successfully updated port: 15ee1476-11da-4794-a070-c4365a572948 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 850.837037] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] Acquiring lock "e249c706-3196-4593-ae96-53f2619e0243" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.837358] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] Lock "e249c706-3196-4593-ae96-53f2619e0243" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.837623] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] Acquiring lock "e249c706-3196-4593-ae96-53f2619e0243-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.837868] env[61978]: DEBUG oslo_concurrency.lockutils 
[None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] Lock "e249c706-3196-4593-ae96-53f2619e0243-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.838093] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] Lock "e249c706-3196-4593-ae96-53f2619e0243-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.840378] env[61978]: INFO nova.compute.manager [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] [instance: e249c706-3196-4593-ae96-53f2619e0243] Terminating instance [ 850.842574] env[61978]: DEBUG nova.compute.manager [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] [instance: e249c706-3196-4593-ae96-53f2619e0243] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 850.842775] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] [instance: e249c706-3196-4593-ae96-53f2619e0243] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 850.844251] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e25d4d46-82f5-4667-9f63-e0f9d2990340 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.854412] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] [instance: e249c706-3196-4593-ae96-53f2619e0243] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 850.855026] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d31c975-20a4-4be9-9e8a-9a54e634dd84 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.864604] env[61978]: DEBUG oslo_vmware.api [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] Waiting for the task: (returnval){ [ 850.864604] env[61978]: value = "task-1394587" [ 850.864604] env[61978]: _type = "Task" [ 850.864604] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.876379] env[61978]: DEBUG oslo_vmware.api [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] Task: {'id': task-1394587, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.906761] env[61978]: DEBUG oslo_concurrency.lockutils [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.930132] env[61978]: DEBUG nova.compute.manager [req-a528a666-a639-4a2a-bb7b-a5f12388f40a req-cb74bf4f-4cff-40e6-8ced-f52840bc4929 service nova] [instance: b26a4784-698d-477a-8db7-58156899d231] Received event network-vif-plugged-3a886f4f-5f7c-4f97-8f00-2555aebe9856 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 850.930132] env[61978]: DEBUG oslo_concurrency.lockutils [req-a528a666-a639-4a2a-bb7b-a5f12388f40a req-cb74bf4f-4cff-40e6-8ced-f52840bc4929 service nova] Acquiring lock "b26a4784-698d-477a-8db7-58156899d231-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.931187] env[61978]: DEBUG oslo_concurrency.lockutils [req-a528a666-a639-4a2a-bb7b-a5f12388f40a req-cb74bf4f-4cff-40e6-8ced-f52840bc4929 service nova] Lock "b26a4784-698d-477a-8db7-58156899d231-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.931242] env[61978]: DEBUG oslo_concurrency.lockutils [req-a528a666-a639-4a2a-bb7b-a5f12388f40a req-cb74bf4f-4cff-40e6-8ced-f52840bc4929 service nova] Lock "b26a4784-698d-477a-8db7-58156899d231-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.931453] env[61978]: DEBUG nova.compute.manager [req-a528a666-a639-4a2a-bb7b-a5f12388f40a req-cb74bf4f-4cff-40e6-8ced-f52840bc4929 service nova] [instance: b26a4784-698d-477a-8db7-58156899d231] No waiting events found dispatching network-vif-plugged-3a886f4f-5f7c-4f97-8f00-2555aebe9856 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 850.931679] env[61978]: WARNING nova.compute.manager [req-a528a666-a639-4a2a-bb7b-a5f12388f40a req-cb74bf4f-4cff-40e6-8ced-f52840bc4929 service nova] [instance: b26a4784-698d-477a-8db7-58156899d231] Received unexpected event network-vif-plugged-3a886f4f-5f7c-4f97-8f00-2555aebe9856 for instance with vm_state building and task_state spawning. [ 850.931871] env[61978]: DEBUG nova.compute.manager [req-a528a666-a639-4a2a-bb7b-a5f12388f40a req-cb74bf4f-4cff-40e6-8ced-f52840bc4929 service nova] [instance: b26a4784-698d-477a-8db7-58156899d231] Received event network-changed-3a886f4f-5f7c-4f97-8f00-2555aebe9856 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 850.932050] env[61978]: DEBUG nova.compute.manager [req-a528a666-a639-4a2a-bb7b-a5f12388f40a req-cb74bf4f-4cff-40e6-8ced-f52840bc4929 service nova] [instance: b26a4784-698d-477a-8db7-58156899d231] Refreshing instance network info cache due to event network-changed-3a886f4f-5f7c-4f97-8f00-2555aebe9856. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 850.932293] env[61978]: DEBUG oslo_concurrency.lockutils [req-a528a666-a639-4a2a-bb7b-a5f12388f40a req-cb74bf4f-4cff-40e6-8ced-f52840bc4929 service nova] Acquiring lock "refresh_cache-b26a4784-698d-477a-8db7-58156899d231" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.932456] env[61978]: DEBUG oslo_concurrency.lockutils [req-a528a666-a639-4a2a-bb7b-a5f12388f40a req-cb74bf4f-4cff-40e6-8ced-f52840bc4929 service nova] Acquired lock "refresh_cache-b26a4784-698d-477a-8db7-58156899d231" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.932642] env[61978]: DEBUG nova.network.neutron [req-a528a666-a639-4a2a-bb7b-a5f12388f40a req-cb74bf4f-4cff-40e6-8ced-f52840bc4929 service nova] [instance: b26a4784-698d-477a-8db7-58156899d231] Refreshing network info cache for port 3a886f4f-5f7c-4f97-8f00-2555aebe9856 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 850.968281] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.518s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.968832] env[61978]: DEBUG nova.compute.manager [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 850.972184] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.030s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.974490] env[61978]: INFO nova.compute.claims [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 851.005298] env[61978]: DEBUG oslo_concurrency.lockutils [req-5b6948e4-f865-4096-bb36-97390e9d2ab4 req-e61b9cf4-5878-4608-9512-c618d920963d service nova] Releasing lock "refresh_cache-5d48e854-45fd-4767-91b7-100f84bdca55" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.113887] env[61978]: INFO nova.compute.manager [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Took 23.33 seconds to build instance. 
[ 851.183603] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394584, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.228448] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394586, 'name': CreateVM_Task, 'duration_secs': 0.435476} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.228448] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b26a4784-698d-477a-8db7-58156899d231] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 851.229382] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.230129] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.230968] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 851.231747] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f7c0e9e-ad08-4f4a-94a3-ebf5850cbef2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.241925] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 851.241925] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b57a6d-0be4-a9ed-4698-f33f106a583a" [ 851.241925] env[61978]: _type = "Task" [ 851.241925] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.254145] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b57a6d-0be4-a9ed-4698-f33f106a583a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.321835] env[61978]: DEBUG oslo_concurrency.lockutils [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "refresh_cache-92eb5edb-803b-48d4-8c4f-338d7c3b3d13" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.322175] env[61978]: DEBUG oslo_concurrency.lockutils [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired lock "refresh_cache-92eb5edb-803b-48d4-8c4f-338d7c3b3d13" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.322390] env[61978]: DEBUG nova.network.neutron [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 851.382021] env[61978]: DEBUG oslo_vmware.api [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] Task: {'id': task-1394587, 'name': PowerOffVM_Task, 'duration_secs': 0.269661} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.382021] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] [instance: e249c706-3196-4593-ae96-53f2619e0243] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 851.382021] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] [instance: e249c706-3196-4593-ae96-53f2619e0243] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 851.382021] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b454d009-de6b-45d9-a16f-f4b3ef942746 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.456688] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] [instance: e249c706-3196-4593-ae96-53f2619e0243] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 851.457083] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] [instance: e249c706-3196-4593-ae96-53f2619e0243] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 851.457583] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 
tempest-DeleteServersAdminTestJSON-895294080-project-admin] Deleting the datastore file [datastore2] e249c706-3196-4593-ae96-53f2619e0243 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 851.457583] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b320eec1-2877-4c87-b24a-aeed8dbcc1f5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.465761] env[61978]: DEBUG oslo_vmware.api [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] Waiting for the task: (returnval){ [ 851.465761] env[61978]: value = "task-1394589" [ 851.465761] env[61978]: _type = "Task" [ 851.465761] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.477171] env[61978]: DEBUG oslo_vmware.api [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] Task: {'id': task-1394589, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.479424] env[61978]: DEBUG nova.compute.utils [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 851.486595] env[61978]: DEBUG nova.compute.manager [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 851.486595] env[61978]: DEBUG nova.network.neutron [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 851.610112] env[61978]: DEBUG nova.policy [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e607dfc944154c1faed12da382640f0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6d7394d965f94155a34dd0ecc0957649', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 851.617081] env[61978]: DEBUG oslo_concurrency.lockutils [None req-960df2b4-4f41-4116-ad9e-3709fc61a224 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Lock "66ee1fd7-40f7-461f-b0c6-5951a58ac660" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.842s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.660605] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquiring lock "d2614f71-3026-41d4-ae04-eaede9b5ead5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.660605] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Lock "d2614f71-3026-41d4-ae04-eaede9b5ead5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.684682] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394584, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.755347] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.755614] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 851.755829] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.892466] env[61978]: DEBUG nova.network.neutron [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 851.975841] env[61978]: DEBUG oslo_vmware.api [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] Task: {'id': task-1394589, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.381848} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.976105] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 851.976292] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] [instance: e249c706-3196-4593-ae96-53f2619e0243] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 851.976468] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] [instance: e249c706-3196-4593-ae96-53f2619e0243] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 851.976639] env[61978]: INFO nova.compute.manager [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] [instance: e249c706-3196-4593-ae96-53f2619e0243] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 851.976873] env[61978]: DEBUG oslo.service.loopingcall [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 851.977084] env[61978]: DEBUG nova.compute.manager [-] [instance: e249c706-3196-4593-ae96-53f2619e0243] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 851.977311] env[61978]: DEBUG nova.network.neutron [-] [instance: e249c706-3196-4593-ae96-53f2619e0243] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 851.983510] env[61978]: DEBUG nova.compute.manager [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 852.124184] env[61978]: DEBUG nova.compute.manager [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 852.138712] env[61978]: DEBUG nova.network.neutron [req-a528a666-a639-4a2a-bb7b-a5f12388f40a req-cb74bf4f-4cff-40e6-8ced-f52840bc4929 service nova] [instance: b26a4784-698d-477a-8db7-58156899d231] Updated VIF entry in instance network info cache for port 3a886f4f-5f7c-4f97-8f00-2555aebe9856. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 852.139106] env[61978]: DEBUG nova.network.neutron [req-a528a666-a639-4a2a-bb7b-a5f12388f40a req-cb74bf4f-4cff-40e6-8ced-f52840bc4929 service nova] [instance: b26a4784-698d-477a-8db7-58156899d231] Updating instance_info_cache with network_info: [{"id": "3a886f4f-5f7c-4f97-8f00-2555aebe9856", "address": "fa:16:3e:94:aa:92", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a886f4f-5f", "ovs_interfaceid": "3a886f4f-5f7c-4f97-8f00-2555aebe9856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.185954] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394584, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.015496} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.188748] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Copied Virtual Disk [datastore1] vmware_temp/651b6e1b-792b-4588-9c41-fa8dcd04a750/4732143d-796a-4a66-9f1e-806f8b0654e0/tmp-sparse.vmdk to [datastore1] vmware_temp/651b6e1b-792b-4588-9c41-fa8dcd04a750/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 852.188969] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Deleting the datastore file [datastore1] vmware_temp/651b6e1b-792b-4588-9c41-fa8dcd04a750/4732143d-796a-4a66-9f1e-806f8b0654e0/tmp-sparse.vmdk {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 852.189425] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8486894f-de93-43f9-ad26-1bebd8853916 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.201188] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Waiting for the task: (returnval){ [ 852.201188] env[61978]: value = "task-1394590" [ 852.201188] env[61978]: _type = "Task" [ 852.201188] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.209529] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394590, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.248263] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d28982-8f2b-4736-bb26-bc6a2725e9ef {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.259261] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ade1b0f-efc5-4f10-a8c7-c784db057a19 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.299739] env[61978]: DEBUG nova.network.neutron [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Updating instance_info_cache with network_info: [{"id": "15ee1476-11da-4794-a070-c4365a572948", "address": "fa:16:3e:02:56:f2", "network": {"id": "bc368623-cfb2-43fb-a7d6-8dd238bd961d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1360202280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d7394d965f94155a34dd0ecc0957649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15ee1476-11", "ovs_interfaceid": "15ee1476-11da-4794-a070-c4365a572948", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.301545] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c590b4-9e4f-4056-9cc5-a3d99e1ee83e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.310945] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c36468-71fe-4a3b-89ee-8ba98b441873 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.327691] env[61978]: DEBUG nova.compute.provider_tree [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 852.646401] env[61978]: DEBUG oslo_concurrency.lockutils [req-a528a666-a639-4a2a-bb7b-a5f12388f40a req-cb74bf4f-4cff-40e6-8ced-f52840bc4929 service nova] Releasing lock "refresh_cache-b26a4784-698d-477a-8db7-58156899d231" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.647234] env[61978]: DEBUG oslo_concurrency.lockutils [None 
req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.711400] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394590, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.114458} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.711400] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 852.711400] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Moving file from [datastore1] vmware_temp/651b6e1b-792b-4588-9c41-fa8dcd04a750/4732143d-796a-4a66-9f1e-806f8b0654e0 to [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0. {{(pid=61978) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 852.711660] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-165dcd95-c3cf-46c9-8aa6-336a60fff7c5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.722834] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Waiting for the task: (returnval){ [ 852.722834] env[61978]: value = "task-1394591" [ 852.722834] env[61978]: _type = "Task" [ 852.722834] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.734536] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394591, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.806701] env[61978]: DEBUG oslo_concurrency.lockutils [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Releasing lock "refresh_cache-92eb5edb-803b-48d4-8c4f-338d7c3b3d13" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.807249] env[61978]: DEBUG nova.compute.manager [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Instance network_info: |[{"id": "15ee1476-11da-4794-a070-c4365a572948", "address": "fa:16:3e:02:56:f2", "network": {"id": "bc368623-cfb2-43fb-a7d6-8dd238bd961d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1360202280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d7394d965f94155a34dd0ecc0957649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15ee1476-11", "ovs_interfaceid": "15ee1476-11da-4794-a070-c4365a572948", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 852.807711] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:56:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e4c8c8fd-baca-4e60-97dc-ff0418d63215', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15ee1476-11da-4794-a070-c4365a572948', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 852.818788] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Creating folder: Project (6d7394d965f94155a34dd0ecc0957649). Parent ref: group-v295764. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 852.818994] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e35f236-0693-4435-afd0-367e524f492e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.830382] env[61978]: DEBUG nova.scheduler.client.report [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 852.836754] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Created folder: Project (6d7394d965f94155a34dd0ecc0957649) in parent group-v295764. [ 852.836964] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Creating folder: Instances. Parent ref: group-v295790. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 852.837307] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b1b30f5-48eb-46b7-be57-0335caf80d75 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.850947] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Created folder: Instances in parent group-v295790. [ 852.851237] env[61978]: DEBUG oslo.service.loopingcall [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 852.851468] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 852.851746] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d694936d-138f-40a1-be48-707b5d13498a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.876572] env[61978]: DEBUG nova.network.neutron [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Successfully created port: 5cb15476-a062-4b08-8f77-6955b8086740 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 852.883156] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 852.883156] env[61978]: value = "task-1394594" [ 852.883156] env[61978]: _type = "Task" [ 852.883156] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.892288] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394594, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.998607] env[61978]: DEBUG nova.compute.manager [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 853.036539] env[61978]: DEBUG nova.virt.hardware [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 853.036777] env[61978]: DEBUG nova.virt.hardware [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 853.036940] env[61978]: DEBUG nova.virt.hardware [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 853.038192] env[61978]: DEBUG nova.virt.hardware [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 853.038389] env[61978]: DEBUG nova.virt.hardware [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 853.038576] env[61978]: DEBUG nova.virt.hardware [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 853.038813] env[61978]: DEBUG nova.virt.hardware [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 853.038997] env[61978]: DEBUG nova.virt.hardware [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 853.039216] env[61978]: DEBUG nova.virt.hardware [None 
req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 853.039391] env[61978]: DEBUG nova.virt.hardware [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 853.039575] env[61978]: DEBUG nova.virt.hardware [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 853.040770] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58ac711f-f3da-4a22-ac4d-9a227b8de4f3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.047956] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquiring lock "96bef3f3-a45c-43ba-a86a-66c1d5686ea6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.048426] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "96bef3f3-a45c-43ba-a86a-66c1d5686ea6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.053481] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a2d652-5de6-4205-b104-5bd46198b7e9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.236996] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394591, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.039067} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.238143] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] File moved {{(pid=61978) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 853.238143] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Cleaning up location [datastore1] vmware_temp/651b6e1b-792b-4588-9c41-fa8dcd04a750 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 853.238143] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Deleting the datastore file [datastore1] vmware_temp/651b6e1b-792b-4588-9c41-fa8dcd04a750 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 853.238143] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c62c72c9-b03b-40c7-a6c7-183b7990da67 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.246577] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Waiting for the task: (returnval){ [ 853.246577] env[61978]: value = "task-1394595" [ 853.246577] env[61978]: _type = "Task" [ 853.246577] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.255415] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394595, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.342684] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.371s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.343383] env[61978]: DEBUG nova.compute.manager [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 853.346196] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.683s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.347871] env[61978]: INFO nova.compute.claims [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 853.357288] env[61978]: DEBUG nova.network.neutron [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Successfully updated port: 2daa968c-ac9c-4f15-ad2b-7977f5581ef1 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 853.394676] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394594, 'name': CreateVM_Task, 'duration_secs': 0.469285} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.395027] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 853.396108] env[61978]: DEBUG oslo_concurrency.lockutils [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.396420] env[61978]: DEBUG oslo_concurrency.lockutils [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.396821] env[61978]: DEBUG oslo_concurrency.lockutils [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 853.397545] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-964e9a95-5270-4011-ba43-68c07572b598 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.407019] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 853.407019] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]523f02f6-525e-85d6-8356-cf383c2f47ec" [ 853.407019] 
env[61978]: _type = "Task" [ 853.407019] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.420101] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523f02f6-525e-85d6-8356-cf383c2f47ec, 'name': SearchDatastore_Task, 'duration_secs': 0.009219} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.420101] env[61978]: DEBUG oslo_concurrency.lockutils [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.420101] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 853.420101] env[61978]: DEBUG oslo_concurrency.lockutils [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.487340] env[61978]: DEBUG nova.network.neutron [-] [instance: e249c706-3196-4593-ae96-53f2619e0243] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.680635] env[61978]: DEBUG nova.compute.manager [None req-1b202a77-6e1e-4cd5-812a-00c811d42288 tempest-ServerExternalEventsTest-1445182087 tempest-ServerExternalEventsTest-1445182087-project] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Received event network-changed {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 853.680635] env[61978]: DEBUG nova.compute.manager [None req-1b202a77-6e1e-4cd5-812a-00c811d42288 tempest-ServerExternalEventsTest-1445182087 tempest-ServerExternalEventsTest-1445182087-project] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Refreshing instance network info cache due to event network-changed. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 853.680635] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1b202a77-6e1e-4cd5-812a-00c811d42288 tempest-ServerExternalEventsTest-1445182087 tempest-ServerExternalEventsTest-1445182087-project] Acquiring lock "refresh_cache-66ee1fd7-40f7-461f-b0c6-5951a58ac660" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.680635] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1b202a77-6e1e-4cd5-812a-00c811d42288 tempest-ServerExternalEventsTest-1445182087 tempest-ServerExternalEventsTest-1445182087-project] Acquired lock "refresh_cache-66ee1fd7-40f7-461f-b0c6-5951a58ac660" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.680635] env[61978]: DEBUG nova.network.neutron [None req-1b202a77-6e1e-4cd5-812a-00c811d42288 tempest-ServerExternalEventsTest-1445182087 tempest-ServerExternalEventsTest-1445182087-project] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 853.768818] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394595, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.03884} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.768818] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 853.768818] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7d1e692-1998-4e9c-b070-0c8e16b101e3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.775222] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Waiting for the task: (returnval){ [ 853.775222] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5274b5f8-26b9-b507-ab5f-eb8914542177" [ 853.775222] env[61978]: _type = "Task" [ 853.775222] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.786352] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5274b5f8-26b9-b507-ab5f-eb8914542177, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.854216] env[61978]: DEBUG nova.compute.utils [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 853.854621] env[61978]: DEBUG nova.compute.manager [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 853.854912] env[61978]: DEBUG nova.network.neutron [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 853.867929] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "refresh_cache-243e7146-46fc-43f4-a83b-cdc58f397f9e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.867929] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired lock "refresh_cache-243e7146-46fc-43f4-a83b-cdc58f397f9e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.867929] env[61978]: DEBUG nova.network.neutron [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 853.934103] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Acquiring lock "2f5b06f6-7178-4fdf-93b6-65477f020898" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.934172] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Lock "2f5b06f6-7178-4fdf-93b6-65477f020898" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.936122] env[61978]: DEBUG nova.policy [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Policy check for 
network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f6e565044d2341ab8956321e6cae9cac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67e9b6ecf8574e4da08a38921882b013', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 853.993195] env[61978]: INFO nova.compute.manager [-] [instance: e249c706-3196-4593-ae96-53f2619e0243] Took 2.01 seconds to deallocate network for instance. [ 854.285356] env[61978]: DEBUG nova.compute.manager [req-ff59c86a-6eac-4399-95f9-93fbe0a85509 req-7d254c18-b8f9-488d-b54f-296c859100b6 service nova] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Received event network-vif-deleted-3f1e490b-cbe2-45bd-9ad3-dc8c1b0acd33 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 854.285942] env[61978]: DEBUG nova.compute.manager [req-ff59c86a-6eac-4399-95f9-93fbe0a85509 req-7d254c18-b8f9-488d-b54f-296c859100b6 service nova] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Received event network-vif-plugged-15ee1476-11da-4794-a070-c4365a572948 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 854.285942] env[61978]: DEBUG oslo_concurrency.lockutils [req-ff59c86a-6eac-4399-95f9-93fbe0a85509 req-7d254c18-b8f9-488d-b54f-296c859100b6 service nova] Acquiring lock "92eb5edb-803b-48d4-8c4f-338d7c3b3d13-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.285942] env[61978]: DEBUG oslo_concurrency.lockutils [req-ff59c86a-6eac-4399-95f9-93fbe0a85509 req-7d254c18-b8f9-488d-b54f-296c859100b6 service nova] Lock "92eb5edb-803b-48d4-8c4f-338d7c3b3d13-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.286932] env[61978]: DEBUG oslo_concurrency.lockutils [req-ff59c86a-6eac-4399-95f9-93fbe0a85509 req-7d254c18-b8f9-488d-b54f-296c859100b6 service nova] Lock "92eb5edb-803b-48d4-8c4f-338d7c3b3d13-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.287179] env[61978]: DEBUG nova.compute.manager [req-ff59c86a-6eac-4399-95f9-93fbe0a85509 req-7d254c18-b8f9-488d-b54f-296c859100b6 service nova] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] No waiting events found dispatching network-vif-plugged-15ee1476-11da-4794-a070-c4365a572948 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 854.287344] env[61978]: WARNING nova.compute.manager [req-ff59c86a-6eac-4399-95f9-93fbe0a85509 req-7d254c18-b8f9-488d-b54f-296c859100b6 service nova] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Received unexpected event network-vif-plugged-15ee1476-11da-4794-a070-c4365a572948 for instance with vm_state building and task_state spawning. 
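(The oslo_concurrency.lockutils records above and below — Acquiring lock "compute_resources", acquired :: waited N s, "released" :: held N s — all come from oslo.concurrency's named-lock helpers. A minimal sketch of that usage pattern, with the guarded function body and its arguments invented purely for illustration; only the lock name "compute_resources" is taken from the log:)

from oslo_concurrency import lockutils

# Hypothetical resource-tracker-style claim guarded by a named in-process lock.
# Callers contending on the same name block here and later log how long they
# "waited" for the lock and how long it was "held".
@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid, vcpus, memory_mb):
    print(f'claiming {vcpus} vCPU / {memory_mb} MB for {instance_uuid}')

instance_claim('eb7cb200-c162-4e92-8916-6d9abd5cf34d', 1, 192)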
[ 854.287639] env[61978]: DEBUG nova.compute.manager [req-ff59c86a-6eac-4399-95f9-93fbe0a85509 req-7d254c18-b8f9-488d-b54f-296c859100b6 service nova] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Received event network-changed-15ee1476-11da-4794-a070-c4365a572948 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 854.287828] env[61978]: DEBUG nova.compute.manager [req-ff59c86a-6eac-4399-95f9-93fbe0a85509 req-7d254c18-b8f9-488d-b54f-296c859100b6 service nova] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Refreshing instance network info cache due to event network-changed-15ee1476-11da-4794-a070-c4365a572948. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 854.288248] env[61978]: DEBUG oslo_concurrency.lockutils [req-ff59c86a-6eac-4399-95f9-93fbe0a85509 req-7d254c18-b8f9-488d-b54f-296c859100b6 service nova] Acquiring lock "refresh_cache-92eb5edb-803b-48d4-8c4f-338d7c3b3d13" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.288578] env[61978]: DEBUG oslo_concurrency.lockutils [req-ff59c86a-6eac-4399-95f9-93fbe0a85509 req-7d254c18-b8f9-488d-b54f-296c859100b6 service nova] Acquired lock "refresh_cache-92eb5edb-803b-48d4-8c4f-338d7c3b3d13" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.288635] env[61978]: DEBUG nova.network.neutron [req-ff59c86a-6eac-4399-95f9-93fbe0a85509 req-7d254c18-b8f9-488d-b54f-296c859100b6 service nova] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Refreshing network info cache for port 15ee1476-11da-4794-a070-c4365a572948 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 854.297794] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5274b5f8-26b9-b507-ab5f-eb8914542177, 'name': SearchDatastore_Task, 'duration_secs': 0.010291} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.298232] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.298438] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 5d48e854-45fd-4767-91b7-100f84bdca55/5d48e854-45fd-4767-91b7-100f84bdca55.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 854.298707] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.298982] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 854.299128] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf47a0df-fbf3-41cd-a955-8af3a8d7591c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.301822] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-009fbd4e-3d23-4d51-a30c-25421c2a0aa4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.312201] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Waiting for the task: (returnval){ [ 854.312201] env[61978]: value = "task-1394596" [ 854.312201] env[61978]: _type = "Task" [ 854.312201] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.314919] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 854.315852] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 854.321036] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3d1653d-0c58-4588-8352-de0b775a3cec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.333086] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394596, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.334103] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 854.334103] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a4c26c-34df-82dc-65a8-968eb4f0d19e" [ 854.334103] env[61978]: _type = "Task" [ 854.334103] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.343790] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a4c26c-34df-82dc-65a8-968eb4f0d19e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.359892] env[61978]: DEBUG nova.compute.manager [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 854.467367] env[61978]: DEBUG nova.network.neutron [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Successfully created port: a9f70cfc-477a-44cc-8077-b23baa39cda7 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 854.502236] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.589563] env[61978]: DEBUG nova.network.neutron [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 854.693803] env[61978]: DEBUG nova.network.neutron [None req-1b202a77-6e1e-4cd5-812a-00c811d42288 tempest-ServerExternalEventsTest-1445182087 tempest-ServerExternalEventsTest-1445182087-project] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Updating instance_info_cache with network_info: [{"id": "11772575-2be7-44fb-b865-5690433fd23d", "address": "fa:16:3e:c2:8f:4f", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.148", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11772575-2b", "ovs_interfaceid": "11772575-2be7-44fb-b865-5690433fd23d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.774688] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51908c4b-dee9-41d5-8d2a-a7fc73673ed0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.789851] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acfcd67d-2e8a-4e69-8b39-bffec79d04d7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.837565] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba4145b-3f89-4596-8175-afe1d4fac274 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.858040] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a4c26c-34df-82dc-65a8-968eb4f0d19e, 'name': SearchDatastore_Task, 'duration_secs': 0.027727} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.858040] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394596, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.859451] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8030fe71-0176-42d1-9263-bc7a5a37764d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.863577] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-269f6cf6-f639-44c2-8ee1-1e3f92a92c97 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.876601] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 854.876601] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52757e55-aa3e-4410-3225-bc1fa52758f7" [ 854.876601] env[61978]: _type = "Task" [ 854.876601] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.886096] env[61978]: DEBUG nova.compute.provider_tree [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.895919] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52757e55-aa3e-4410-3225-bc1fa52758f7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.111416] env[61978]: DEBUG nova.network.neutron [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Updating instance_info_cache with network_info: [{"id": "2daa968c-ac9c-4f15-ad2b-7977f5581ef1", "address": "fa:16:3e:0c:ea:2e", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2daa968c-ac", "ovs_interfaceid": "2daa968c-ac9c-4f15-ad2b-7977f5581ef1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.198451] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1b202a77-6e1e-4cd5-812a-00c811d42288 tempest-ServerExternalEventsTest-1445182087 tempest-ServerExternalEventsTest-1445182087-project] Releasing lock "refresh_cache-66ee1fd7-40f7-461f-b0c6-5951a58ac660" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.300569] env[61978]: DEBUG nova.network.neutron [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Successfully updated port: 5cb15476-a062-4b08-8f77-6955b8086740 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 855.333143] env[61978]: DEBUG nova.network.neutron [req-ff59c86a-6eac-4399-95f9-93fbe0a85509 req-7d254c18-b8f9-488d-b54f-296c859100b6 service nova] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Updated VIF entry in instance network info cache for port 15ee1476-11da-4794-a070-c4365a572948. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 855.333478] env[61978]: DEBUG nova.network.neutron [req-ff59c86a-6eac-4399-95f9-93fbe0a85509 req-7d254c18-b8f9-488d-b54f-296c859100b6 service nova] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Updating instance_info_cache with network_info: [{"id": "15ee1476-11da-4794-a070-c4365a572948", "address": "fa:16:3e:02:56:f2", "network": {"id": "bc368623-cfb2-43fb-a7d6-8dd238bd961d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1360202280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d7394d965f94155a34dd0ecc0957649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15ee1476-11", "ovs_interfaceid": "15ee1476-11da-4794-a070-c4365a572948", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.353410] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394596, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.718863} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.353749] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 5d48e854-45fd-4767-91b7-100f84bdca55/5d48e854-45fd-4767-91b7-100f84bdca55.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 855.354099] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 855.355056] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9161d8fa-9287-43d5-b698-e889ae74ba7e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.368376] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Waiting for the task: (returnval){ [ 855.368376] env[61978]: value = "task-1394597" [ 855.368376] env[61978]: _type = "Task" [ 855.368376] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.372408] env[61978]: DEBUG nova.compute.manager [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 855.379267] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394597, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.391714] env[61978]: DEBUG nova.scheduler.client.report [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 855.395085] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52757e55-aa3e-4410-3225-bc1fa52758f7, 'name': SearchDatastore_Task, 'duration_secs': 0.053421} completed successfully. 
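The ServerDiagnosticsTest spawn above shows the usual root-disk sequence: the cached image vmdk is copied into the instance directory (CopyVirtualDisk_Task) and then grown to the flavor's root size (ExtendVirtualDisk_Task, logged as 1048576 KB for a 1 GiB root disk). A sketch of that ordering, with hypothetical copy_virtual_disk / extend_virtual_disk helpers standing in for the real vCenter calls:

    # Hypothetical stand-ins for the two vCenter calls; only the ordering and the
    # unit conversion are the point here.
    def copy_virtual_disk(src, dst):
        print(f"CopyVirtualDisk_Task: {src} -> {dst}")

    def extend_virtual_disk(path, size_kb):
        print(f"ExtendVirtualDisk_Task: {path} -> {size_kb} KB")

    def prepare_root_disk(image_id, instance_uuid, root_gb, datastore="datastore1"):
        """Copy the cached image vmdk into the instance folder, then grow it to the flavor size."""
        cache = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        root = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
        copy_virtual_disk(cache, root)
        extend_virtual_disk(root, root_gb * 1024 * 1024)  # 1 GiB root -> "1048576" in the log

    prepare_root_disk("4732143d-796a-4a66-9f1e-806f8b0654e0",
                      "5d48e854-45fd-4767-91b7-100f84bdca55", root_gb=1)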
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.395741] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.396051] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 96a38ed0-c880-4f21-9389-99f039279072/96a38ed0-c880-4f21-9389-99f039279072.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 855.396596] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.396784] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 855.396993] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-84ccd598-4d24-4522-94cf-fcacfc5ce09d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.399663] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dcabfadb-6fd8-4ae1-9b5c-b1f16f1597f0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.409837] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 855.409837] env[61978]: value = "task-1394598" [ 855.409837] env[61978]: _type = "Task" [ 855.409837] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.414035] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 855.414270] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Folder [datastore1] devstack-image-cache_base created. 
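The Acquiring/Releasing lock lines around the image-cache vmdk show how concurrent spawns are serialized on the shared cache entry. The sketch below reproduces that per-name lock pattern with the standard library only; the real code uses oslo_concurrency.lockutils, and with_named_lock is an illustrative helper.

    import threading
    from collections import defaultdict

    # Simplified, stdlib-only analogue of per-name locks: concurrent spawns
    # serialize on the cached image vmdk path before copying from it.
    _locks = defaultdict(threading.Lock)

    def with_named_lock(name, fn):
        """Run fn() while holding the lock registered under `name`."""
        print(f'Acquiring lock "{name}"')
        with _locks[name]:
            print(f'Lock "{name}" acquired')
            result = fn()
        print(f'Lock "{name}" released')
        return result

    path = ("[datastore1] devstack-image-cache_base/"
            "4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk")
    with_named_lock(path, lambda: print("copying from the image cache..."))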
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 855.418731] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1635d40c-0bd3-4583-9e50-a1581c627113 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.424168] env[61978]: DEBUG nova.virt.hardware [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 855.424412] env[61978]: DEBUG nova.virt.hardware [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 855.424564] env[61978]: DEBUG nova.virt.hardware [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 855.424753] env[61978]: DEBUG nova.virt.hardware [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 855.426672] env[61978]: DEBUG nova.virt.hardware [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 855.426672] env[61978]: DEBUG nova.virt.hardware [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 855.426672] env[61978]: DEBUG nova.virt.hardware [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 855.426672] env[61978]: DEBUG nova.virt.hardware [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 855.426672] env[61978]: DEBUG nova.virt.hardware [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 855.426982] env[61978]: DEBUG nova.virt.hardware [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 855.426982] env[61978]: DEBUG nova.virt.hardware [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 855.427348] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d252908b-dde4-404a-bc98-5fab229807bd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.433986] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 855.433986] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52645083-e1d4-74ee-7fa4-3d46d9432c34" [ 855.433986] env[61978]: _type = "Task" [ 855.433986] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.437680] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394598, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.446601] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e578694a-ab7f-4af1-8811-a1f8f8a3e481 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.455924] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52645083-e1d4-74ee-7fa4-3d46d9432c34, 'name': SearchDatastore_Task} progress is 0%. 
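The hardware.py records above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies") amount to enumerating sockets*cores*threads factorizations of the vCPU count within the flavor/image limits. A compact sketch of that enumeration, under the assumption that only the product constraint and the per-dimension maxima matter:

    from itertools import product

    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate (sockets, cores, threads) triples whose product equals the vCPU count."""
        found = []
        for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
            if sockets * cores * threads != vcpus:
                continue
            if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
                found.append((sockets, cores, threads))
        return found

    # The m1.nano flavor in the log has a single vCPU, so only 1:1:1 survives.
    print(possible_cpu_topologies(1))  # [(1, 1, 1)]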
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.510035] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Acquiring lock "66ee1fd7-40f7-461f-b0c6-5951a58ac660" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.510035] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Lock "66ee1fd7-40f7-461f-b0c6-5951a58ac660" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.510189] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Acquiring lock "66ee1fd7-40f7-461f-b0c6-5951a58ac660-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.510326] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Lock "66ee1fd7-40f7-461f-b0c6-5951a58ac660-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.510522] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Lock "66ee1fd7-40f7-461f-b0c6-5951a58ac660-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.513060] env[61978]: INFO nova.compute.manager [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Terminating instance [ 855.515343] env[61978]: DEBUG nova.compute.manager [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 855.516632] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 855.516632] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f74604-a194-4c79-9c9e-6e943abeaafc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.526097] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 855.526356] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e13da405-b248-40f2-b75b-76c7bedf5c20 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.547616] env[61978]: DEBUG oslo_vmware.api [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Waiting for the task: (returnval){ [ 855.547616] env[61978]: value = "task-1394599" [ 855.547616] env[61978]: _type = "Task" [ 855.547616] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.556152] env[61978]: DEBUG oslo_vmware.api [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': task-1394599, 'name': PowerOffVM_Task} progress is 0%. 
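The terminate path for instance 66ee1fd7-40f7-461f-b0c6-5951a58ac660 starts here with PowerOffVM_Task (task-1394599) and, further down in the log, continues with UnregisterVM and a DeleteDatastoreFile_Task on its datastore directory. A toy sketch of that ordering; the helper functions are hypothetical stand-ins for the vCenter operations:

    # Hypothetical stand-ins for the three vCenter operations on the destroy path.
    def power_off_vm(vm):
        print(f"PowerOffVM_Task for {vm}")

    def unregister_vm(vm):
        print(f"UnregisterVM for {vm}")

    def delete_datastore_dir(path):
        print(f"DeleteDatastoreFile_Task for {path}")

    def destroy_instance(vm_uuid, datastore="datastore2"):
        """Tear the instance down in the order this log records it."""
        power_off_vm(vm_uuid)                              # task-1394599 above
        unregister_vm(vm_uuid)                             # removes the VM from the inventory
        delete_datastore_dir(f"[{datastore}] {vm_uuid}")   # wipes the instance directory

    destroy_instance("66ee1fd7-40f7-461f-b0c6-5951a58ac660")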
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.615682] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Releasing lock "refresh_cache-243e7146-46fc-43f4-a83b-cdc58f397f9e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.616049] env[61978]: DEBUG nova.compute.manager [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Instance network_info: |[{"id": "2daa968c-ac9c-4f15-ad2b-7977f5581ef1", "address": "fa:16:3e:0c:ea:2e", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2daa968c-ac", "ovs_interfaceid": "2daa968c-ac9c-4f15-ad2b-7977f5581ef1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 855.616488] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:ea:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2daa968c-ac9c-4f15-ad2b-7977f5581ef1', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 855.625359] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Creating folder: Project (2a33ac41ae0247b59c400c6ed9145239). Parent ref: group-v295764. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 855.626323] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48d9b04f-3b87-408e-84e8-4d99725a33c0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.642256] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Created folder: Project (2a33ac41ae0247b59c400c6ed9145239) in parent group-v295764. [ 855.642943] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Creating folder: Instances. Parent ref: group-v295793. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 855.642943] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-acd9e693-46de-4c78-b9bb-cbb95c60e393 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.658426] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Created folder: Instances in parent group-v295793. [ 855.659320] env[61978]: DEBUG oslo.service.loopingcall [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 855.659320] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 855.659320] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f1fe0acd-cde4-42ff-88b7-ed60ef58ec7f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.691822] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 855.691822] env[61978]: value = "task-1394602" [ 855.691822] env[61978]: _type = "Task" [ 855.691822] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.707460] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394602, 'name': CreateVM_Task} progress is 0%. 
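Before CreateVM_Task the driver makes sure the vCenter folder hierarchy exists: a per-project folder under the Nova root folder, then an Instances child (the "Created folder: ..." lines above). A small sketch of that idempotent ensure-folder step, with a hypothetical Folder class; the names and parent ref are taken from the log.

    class Folder:
        """Tiny in-memory model of a vCenter folder; purely illustrative."""
        def __init__(self, name):
            self.name = name
            self.children = {}

        def ensure_child(self, name):
            # Create the child only if it is missing, mirroring _create_folder_if_missing.
            if name not in self.children:
                print(f"Created folder: {name} in parent {self.name}.")
                self.children[name] = Folder(name)
            return self.children[name]

    root = Folder("group-v295764")
    project = root.ensure_child("Project (2a33ac41ae0247b59c400c6ed9145239)")
    instances = project.ensure_child("Instances")
    # CreateVM_Task then places instance 243e7146-46fc-43f4-a83b-cdc58f397f9e inside `instances`.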
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.807592] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "refresh_cache-85fc5af8-454d-4042-841a-945b7e84eb6c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.807816] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired lock "refresh_cache-85fc5af8-454d-4042-841a-945b7e84eb6c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.807816] env[61978]: DEBUG nova.network.neutron [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 855.847357] env[61978]: DEBUG oslo_concurrency.lockutils [req-ff59c86a-6eac-4399-95f9-93fbe0a85509 req-7d254c18-b8f9-488d-b54f-296c859100b6 service nova] Releasing lock "refresh_cache-92eb5edb-803b-48d4-8c4f-338d7c3b3d13" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.880345] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394597, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104854} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.880822] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 855.881978] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32aef7a-e7b5-4410-a09e-2fa775d2b7b5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.902638] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.556s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.903244] env[61978]: DEBUG nova.compute.manager [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 855.916041] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 5d48e854-45fd-4767-91b7-100f84bdca55/5d48e854-45fd-4767-91b7-100f84bdca55.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 855.916643] env[61978]: DEBUG oslo_concurrency.lockutils [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.867s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.918224] env[61978]: INFO nova.compute.claims [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 855.921468] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f83e5c14-44bd-4b61-9d7f-85bcfb499cce {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.963483] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52645083-e1d4-74ee-7fa4-3d46d9432c34, 'name': SearchDatastore_Task, 'duration_secs': 0.026959} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.968107] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394598, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.969379] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Waiting for the task: (returnval){ [ 855.969379] env[61978]: value = "task-1394603" [ 855.969379] env[61978]: _type = "Task" [ 855.969379] env[61978]: } to complete. 
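The resource-tracker lines around here (the compute_resources claim, plus the inventory reported earlier for provider 44209228-3464-48ae-bc40-83eccd44b0cf) reduce to simple capacity arithmetic. A sketch using the figures from the log, assuming the usual Placement-style formula capacity = (total - reserved) * allocation_ratio:

    # Inventory figures reported earlier for the provider.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: capacity {capacity:g}")
    # VCPU: capacity 192, MEMORY_MB: capacity 196078, DISK_GB: capacity 400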
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.969379] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a371146-589c-49e3-9dcd-0d20fcc73fc1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.981742] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 855.981742] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]524f29f8-feca-f854-2071-deddeea723b8" [ 855.981742] env[61978]: _type = "Task" [ 855.981742] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.986282] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394603, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.996445] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]524f29f8-feca-f854-2071-deddeea723b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.061103] env[61978]: DEBUG oslo_vmware.api [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': task-1394599, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.207433] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394602, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.373963] env[61978]: DEBUG nova.network.neutron [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 856.418582] env[61978]: DEBUG nova.compute.utils [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 856.420348] env[61978]: DEBUG nova.compute.manager [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Allocating IP information in the background. 
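"Allocating IP information in the background" means port allocation for eb7cb200-c162-4e92-8916-6d9abd5cf34d is kicked off asynchronously while the manager goes on to build block device mappings. Nova does this with its own eventlet-based helpers; the sketch below only illustrates the overlap with a ThreadPoolExecutor, and the returned port/IP values are hypothetical.

    from concurrent.futures import ThreadPoolExecutor
    import time

    def allocate_for_instance(instance_uuid):
        """Stands in for the Neutron round-trips; the returned values are hypothetical."""
        time.sleep(0.1)
        return [{"port_id": "example-port", "fixed_ip": "192.168.128.99"}]

    def build_block_device_mappings(instance_uuid):
        print(f"building block device mappings for {instance_uuid}")

    instance = "eb7cb200-c162-4e92-8916-6d9abd5cf34d"
    with ThreadPoolExecutor(max_workers=1) as pool:
        future = pool.submit(allocate_for_instance, instance)  # networks, in the background
        build_block_device_mappings(instance)                  # continues in the foreground
        network_info = future.result()                         # joined before the VM is created
    print(network_info)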
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 856.420467] env[61978]: DEBUG nova.network.neutron [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 856.433496] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394598, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.598827} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.433813] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 96a38ed0-c880-4f21-9389-99f039279072/96a38ed0-c880-4f21-9389-99f039279072.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 856.434136] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 856.434420] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8e09d7e3-584b-456b-8785-8fe170052860 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.443400] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 856.443400] env[61978]: value = "task-1394604" [ 856.443400] env[61978]: _type = "Task" [ 856.443400] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.456879] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394604, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.486096] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394603, 'name': ReconfigVM_Task, 'duration_secs': 0.31566} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.487053] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 5d48e854-45fd-4767-91b7-100f84bdca55/5d48e854-45fd-4767-91b7-100f84bdca55.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 856.493375] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-462cc823-fe29-4778-a05b-060038f7f70d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.502688] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]524f29f8-feca-f854-2071-deddeea723b8, 'name': SearchDatastore_Task, 'duration_secs': 0.013769} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.504171] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.504474] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] b26a4784-698d-477a-8db7-58156899d231/b26a4784-698d-477a-8db7-58156899d231.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 856.505312] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Waiting for the task: (returnval){ [ 856.505312] env[61978]: value = "task-1394605" [ 856.505312] env[61978]: _type = "Task" [ 856.505312] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.505312] env[61978]: DEBUG oslo_concurrency.lockutils [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.505312] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 856.505955] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-79f1421f-b48b-4097-81fa-52ead76e5ad0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.508031] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58cc37b9-86e0-4627-9c8a-d46aa121244f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.523067] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394605, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.523067] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 856.523067] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 856.523278] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 856.523278] env[61978]: value = "task-1394606" [ 856.523278] env[61978]: _type = "Task" [ 856.523278] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.523401] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63448c7a-3bbb-414d-be57-560459374268 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.535268] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394606, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.548798] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 856.548798] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5228a6e0-2eb6-e250-b26d-1a6fe3499a15" [ 856.548798] env[61978]: _type = "Task" [ 856.548798] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.565321] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5228a6e0-2eb6-e250-b26d-1a6fe3499a15, 'name': SearchDatastore_Task, 'duration_secs': 0.011217} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.569947] env[61978]: DEBUG nova.policy [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '294b52266c6c4aecafc7e8146d727f97', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b51fc76950ff44b69f26a6977be315bc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 856.571870] env[61978]: DEBUG oslo_vmware.api [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': task-1394599, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.572180] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32680558-c7e0-4627-995d-2486b4619a99 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.579693] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 856.579693] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d837d8-05ae-ab62-9877-d92a0d1af049" [ 856.579693] env[61978]: _type = "Task" [ 856.579693] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.589379] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d837d8-05ae-ab62-9877-d92a0d1af049, 'name': SearchDatastore_Task} progress is 0%. 
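The nova.policy line above records a failed check of network:attach_external_network for a request that only carries the reader and member roles. Assuming the rule is admin-only (as the default policy typically is), a dependency-free sketch of that kind of role check looks like this; POLICY and check are illustrative, not the oslo.policy API:

    # Illustrative rule table and check; not the oslo.policy API.
    POLICY = {"network:attach_external_network": {"admin"}}

    def check(rule, credentials):
        required = POLICY[rule]
        allowed = bool(required & set(credentials["roles"]))
        if not allowed:
            print(f"Policy check for {rule} failed with credentials {credentials}")
        return allowed

    creds = {"user_id": "294b52266c6c4aecafc7e8146d727f97",
             "project_id": "b51fc76950ff44b69f26a6977be315bc",
             "roles": ["reader", "member"]}
    check("network:attach_external_network", creds)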
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.708023] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394602, 'name': CreateVM_Task, 'duration_secs': 0.591196} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.708319] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 856.709321] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.709321] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.709701] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 856.710074] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6be33dfc-f16d-40db-8841-4b6c34bd97d6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.718386] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 856.718386] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5231a660-1bc2-9fc8-f440-2d3215aa75fa" [ 856.718386] env[61978]: _type = "Task" [ 856.718386] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.727720] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5231a660-1bc2-9fc8-f440-2d3215aa75fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.931430] env[61978]: DEBUG nova.compute.manager [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 856.962661] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394604, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068038} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.963813] env[61978]: DEBUG nova.network.neutron [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Updating instance_info_cache with network_info: [{"id": "5cb15476-a062-4b08-8f77-6955b8086740", "address": "fa:16:3e:d1:bb:14", "network": {"id": "bc368623-cfb2-43fb-a7d6-8dd238bd961d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1360202280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d7394d965f94155a34dd0ecc0957649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5cb15476-a0", "ovs_interfaceid": "5cb15476-a062-4b08-8f77-6955b8086740", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.965459] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 856.966083] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74a4267-a584-4c97-aab8-f82c93661dbd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.999915] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 96a38ed0-c880-4f21-9389-99f039279072/96a38ed0-c880-4f21-9389-99f039279072.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 857.003703] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34683a92-f8fd-4e15-8196-36fd09450102 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.036099] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c 
tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 857.036099] env[61978]: value = "task-1394607" [ 857.036099] env[61978]: _type = "Task" [ 857.036099] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.036321] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394605, 'name': Rename_Task, 'duration_secs': 0.190864} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.039580] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 857.045132] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-381b34f1-d949-4ff2-bec7-3d15dd76c2b6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.055136] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394606, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.061173] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394607, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.061827] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Waiting for the task: (returnval){ [ 857.061827] env[61978]: value = "task-1394608" [ 857.061827] env[61978]: _type = "Task" [ 857.061827] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.071411] env[61978]: DEBUG oslo_vmware.api [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': task-1394599, 'name': PowerOffVM_Task, 'duration_secs': 1.216319} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.072273] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 857.072484] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 857.072781] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4295aaf0-c118-45e2-9623-59733587dde7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.078233] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394608, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.092245] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d837d8-05ae-ab62-9877-d92a0d1af049, 'name': SearchDatastore_Task, 'duration_secs': 0.009776} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.092639] env[61978]: DEBUG oslo_concurrency.lockutils [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.092822] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 92eb5edb-803b-48d4-8c4f-338d7c3b3d13/92eb5edb-803b-48d4-8c4f-338d7c3b3d13.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 857.093019] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-01c7b48e-482e-4916-a7c1-dfb8c422716f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.104644] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 857.104644] env[61978]: value = "task-1394610" [ 857.104644] env[61978]: _type = "Task" [ 857.104644] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.113808] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394610, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.163135] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 857.163579] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 857.163944] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Deleting the datastore file [datastore2] 66ee1fd7-40f7-461f-b0c6-5951a58ac660 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 857.165376] env[61978]: DEBUG nova.network.neutron [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Successfully updated port: a9f70cfc-477a-44cc-8077-b23baa39cda7 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 857.166252] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-99cec4f0-c5fd-482b-a5fb-f3cf85a54b03 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.177737] env[61978]: DEBUG oslo_vmware.api [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Waiting for the task: (returnval){ [ 857.177737] env[61978]: value = "task-1394611" [ 857.177737] env[61978]: _type = "Task" [ 857.177737] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.198912] env[61978]: DEBUG oslo_vmware.api [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': task-1394611, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.231275] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5231a660-1bc2-9fc8-f440-2d3215aa75fa, 'name': SearchDatastore_Task, 'duration_secs': 0.058432} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.231617] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.231890] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 857.232191] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.232368] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.232647] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 857.232959] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f901746-e0fd-465c-84ed-733f4f572837 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.254834] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 857.255064] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 857.258673] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57fe2e30-0a70-4e08-bcb4-5601c3ad3e0d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.266086] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 857.266086] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52fde80e-c51c-bd8a-9ef8-a74ba9863b10" [ 857.266086] env[61978]: _type = "Task" [ 857.266086] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.275153] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52fde80e-c51c-bd8a-9ef8-a74ba9863b10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.345053] env[61978]: DEBUG nova.network.neutron [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Successfully created port: 82595737-f96a-45c3-9bcc-2642e53bdaec {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 857.369601] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600e761d-9cad-4b3b-b425-d168baee0da5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.380413] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b68427-5db9-437f-8c3d-3791b28e262f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.421607] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b712a5c-1f9e-4f08-916c-4f7dfc3ae070 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.430969] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3185abc-0e94-4fe1-81e3-cdf43c559428 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.456901] env[61978]: DEBUG nova.compute.provider_tree [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 857.470334] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Releasing lock "refresh_cache-85fc5af8-454d-4042-841a-945b7e84eb6c" {{(pid=61978) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.470881] env[61978]: DEBUG nova.compute.manager [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Instance network_info: |[{"id": "5cb15476-a062-4b08-8f77-6955b8086740", "address": "fa:16:3e:d1:bb:14", "network": {"id": "bc368623-cfb2-43fb-a7d6-8dd238bd961d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1360202280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d7394d965f94155a34dd0ecc0957649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5cb15476-a0", "ovs_interfaceid": "5cb15476-a062-4b08-8f77-6955b8086740", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 857.471402] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:bb:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e4c8c8fd-baca-4e60-97dc-ff0418d63215', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5cb15476-a062-4b08-8f77-6955b8086740', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 857.479340] env[61978]: DEBUG oslo.service.loopingcall [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 857.479595] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 857.480268] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e3613c0b-29d6-4765-9cf0-7c84de241e4e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.504404] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 857.504404] env[61978]: value = "task-1394612" [ 857.504404] env[61978]: _type = "Task" [ 857.504404] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.515187] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394612, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.551816] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394606, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.538851} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.556071] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] b26a4784-698d-477a-8db7-58156899d231/b26a4784-698d-477a-8db7-58156899d231.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 857.556071] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 857.556071] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394607, 'name': ReconfigVM_Task, 'duration_secs': 0.473484} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.556248] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-533fd0c4-e668-4c02-8914-790e8f7c56c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.558759] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 96a38ed0-c880-4f21-9389-99f039279072/96a38ed0-c880-4f21-9389-99f039279072.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 857.559489] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-684583b4-352d-42fc-9342-83702cc389a8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.572272] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 857.572272] env[61978]: value = "task-1394614" [ 857.572272] env[61978]: _type = "Task" [ 857.572272] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.572272] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 857.572272] env[61978]: value = "task-1394613" [ 857.572272] env[61978]: _type = "Task" [ 857.572272] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.576134] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394608, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.590634] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394613, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.595156] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394614, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.620442] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394610, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.670846] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Acquiring lock "refresh_cache-2084a365-b662-4564-b899-ab4c4a63f2b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.670846] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Acquired lock "refresh_cache-2084a365-b662-4564-b899-ab4c4a63f2b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.670846] env[61978]: DEBUG nova.network.neutron [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 857.696388] env[61978]: DEBUG oslo_vmware.api [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Task: {'id': task-1394611, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.237587} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.697386] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 857.697386] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 857.697386] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 857.697386] env[61978]: INFO nova.compute.manager [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Took 2.18 seconds to destroy the instance on the hypervisor. [ 857.697809] env[61978]: DEBUG oslo.service.loopingcall [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 857.697809] env[61978]: DEBUG nova.compute.manager [-] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 857.697809] env[61978]: DEBUG nova.network.neutron [-] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 857.785866] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52fde80e-c51c-bd8a-9ef8-a74ba9863b10, 'name': SearchDatastore_Task, 'duration_secs': 0.029197} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.787376] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b006b2d2-1b04-499a-b364-23b90b227570 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.794727] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 857.794727] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52028013-593f-2bfb-4e26-ac1fafed1842" [ 857.794727] env[61978]: _type = "Task" [ 857.794727] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.807545] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52028013-593f-2bfb-4e26-ac1fafed1842, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.957065] env[61978]: DEBUG nova.compute.manager [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 857.961455] env[61978]: DEBUG nova.scheduler.client.report [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 857.990240] env[61978]: DEBUG nova.virt.hardware [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 857.990484] env[61978]: DEBUG nova.virt.hardware [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 857.990639] env[61978]: DEBUG nova.virt.hardware [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 857.990816] env[61978]: DEBUG nova.virt.hardware [None req-8b0bb830-9562-449d-86ce-35d331ba998b 
tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 857.990962] env[61978]: DEBUG nova.virt.hardware [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 857.991165] env[61978]: DEBUG nova.virt.hardware [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 857.991377] env[61978]: DEBUG nova.virt.hardware [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 857.991534] env[61978]: DEBUG nova.virt.hardware [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 857.992836] env[61978]: DEBUG nova.virt.hardware [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 857.992836] env[61978]: DEBUG nova.virt.hardware [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 857.992836] env[61978]: DEBUG nova.virt.hardware [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 857.993292] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b069214-f161-4bec-bc88-432d6b600d10 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.012951] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0549a363-0d09-4da8-bf64-1d625dfaa9b8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.034809] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394612, 'name': CreateVM_Task, 'duration_secs': 0.397493} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.034998] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 858.035670] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.035847] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.036144] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 858.036390] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79ccb393-4bda-4bdf-b90f-dfb6fdd16d08 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.043861] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 858.043861] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b4cd3c-e768-f863-12cb-d3d7ef522bbb" [ 858.043861] env[61978]: _type = "Task" [ 858.043861] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.056589] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b4cd3c-e768-f863-12cb-d3d7ef522bbb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.074929] env[61978]: DEBUG oslo_vmware.api [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394608, 'name': PowerOnVM_Task, 'duration_secs': 0.725813} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.075703] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 858.075703] env[61978]: INFO nova.compute.manager [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Took 14.89 seconds to spawn the instance on the hypervisor. [ 858.075703] env[61978]: DEBUG nova.compute.manager [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 858.076523] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af540e9d-5567-4e33-aa29-3aedd160bd59 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.105111] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394613, 'name': Rename_Task, 'duration_secs': 0.257643} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.105360] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394614, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.113012} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.108322] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 858.108600] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 858.109090] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f237246e-beae-4ff9-b48f-603ec23e05f1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.111318] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed0fe0a-bf95-4b3e-9f30-d3ce32d5ef9f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.127468] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394610, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.149331] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] b26a4784-698d-477a-8db7-58156899d231/b26a4784-698d-477a-8db7-58156899d231.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 858.149733] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 858.149733] env[61978]: value = "task-1394615" [ 858.149733] env[61978]: _type = "Task" [ 858.149733] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.149986] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca12b44a-380a-4231-9251-e96ccf5725eb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.177134] env[61978]: DEBUG nova.compute.manager [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Received event network-vif-plugged-2daa968c-ac9c-4f15-ad2b-7977f5581ef1 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 858.177846] env[61978]: DEBUG oslo_concurrency.lockutils [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] Acquiring lock "243e7146-46fc-43f4-a83b-cdc58f397f9e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.177846] env[61978]: DEBUG oslo_concurrency.lockutils [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] Lock "243e7146-46fc-43f4-a83b-cdc58f397f9e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.177846] env[61978]: DEBUG oslo_concurrency.lockutils [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] Lock "243e7146-46fc-43f4-a83b-cdc58f397f9e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.178264] env[61978]: DEBUG nova.compute.manager [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] No waiting events found dispatching network-vif-plugged-2daa968c-ac9c-4f15-ad2b-7977f5581ef1 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 858.178264] env[61978]: WARNING nova.compute.manager [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Received unexpected event network-vif-plugged-2daa968c-ac9c-4f15-ad2b-7977f5581ef1 for instance with vm_state building and task_state spawning. 
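The "Acquiring lock" / "acquired" / "released" DEBUG lines above come from oslo.concurrency's lockutils helpers, which Nova uses to serialize work on per-instance event queues and shared image-cache paths. A minimal sketch of that pattern is below; the lock name and the guarded body are placeholders for illustration, not Nova's actual code.

```python
# Illustrative only: roughly the locking pattern behind the
# "Acquiring lock" / "acquired" / "released" DEBUG lines above.
# The lock name and the guarded work are placeholders.
from oslo_concurrency import lockutils

# Context-manager form: the lock is held for the duration of the block.
with lockutils.lock("example-instance-events", external=False):
    pass  # critical section guarded by the in-process lock

# Decorator form: every call to the function serializes on the named lock.
@lockutils.synchronized("example-instance-events")
def pop_event():
    pass
```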
[ 858.178264] env[61978]: DEBUG nova.compute.manager [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] [instance: e249c706-3196-4593-ae96-53f2619e0243] Received event network-vif-deleted-2bb74ad2-1c4e-4d05-ab88-06e859b1a378 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 858.181450] env[61978]: DEBUG nova.compute.manager [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Received event network-changed-2daa968c-ac9c-4f15-ad2b-7977f5581ef1 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 858.181450] env[61978]: DEBUG nova.compute.manager [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Refreshing instance network info cache due to event network-changed-2daa968c-ac9c-4f15-ad2b-7977f5581ef1. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 858.181450] env[61978]: DEBUG oslo_concurrency.lockutils [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] Acquiring lock "refresh_cache-243e7146-46fc-43f4-a83b-cdc58f397f9e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.181450] env[61978]: DEBUG oslo_concurrency.lockutils [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] Acquired lock "refresh_cache-243e7146-46fc-43f4-a83b-cdc58f397f9e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.181450] env[61978]: DEBUG nova.network.neutron [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Refreshing network info cache for port 2daa968c-ac9c-4f15-ad2b-7977f5581ef1 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 858.188163] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 858.188163] env[61978]: value = "task-1394616" [ 858.188163] env[61978]: _type = "Task" [ 858.188163] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.193704] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394615, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.206739] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394616, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.232133] env[61978]: DEBUG nova.network.neutron [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 858.294706] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Acquiring lock "a4d45835-f065-445f-bcb6-d1b01d545cb0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.296035] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Lock "a4d45835-f065-445f-bcb6-d1b01d545cb0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.306592] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52028013-593f-2bfb-4e26-ac1fafed1842, 'name': SearchDatastore_Task, 'duration_secs': 0.032398} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.309594] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.310257] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 243e7146-46fc-43f4-a83b-cdc58f397f9e/243e7146-46fc-43f4-a83b-cdc58f397f9e.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 858.310460] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-768d1a1e-e471-429f-8619-dbf3fff7e5f2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.318448] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 858.318448] env[61978]: value = "task-1394617" [ 858.318448] env[61978]: _type = "Task" [ 858.318448] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.327322] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394617, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.388268] env[61978]: DEBUG nova.network.neutron [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Updating instance_info_cache with network_info: [{"id": "a9f70cfc-477a-44cc-8077-b23baa39cda7", "address": "fa:16:3e:6f:c4:46", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9f70cfc-47", "ovs_interfaceid": "a9f70cfc-477a-44cc-8077-b23baa39cda7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.465242] env[61978]: DEBUG oslo_concurrency.lockutils [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.549s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.465741] env[61978]: DEBUG nova.compute.manager [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 858.468557] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.782s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.472172] env[61978]: INFO nova.compute.claims [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 858.554103] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b4cd3c-e768-f863-12cb-d3d7ef522bbb, 'name': SearchDatastore_Task, 'duration_secs': 0.01126} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.554416] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.554727] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 858.554887] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.555076] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.555302] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 858.555580] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac243fca-801b-4456-b716-9e88f850f92c {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.565257] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 858.565441] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 858.566186] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-829e688a-6f13-4efe-8c68-82a44dee6459 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.574023] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 858.574023] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]520d7c63-81ed-9bcd-16dd-867a70b208af" [ 858.574023] env[61978]: _type = "Task" [ 858.574023] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.582319] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520d7c63-81ed-9bcd-16dd-867a70b208af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.631421] env[61978]: INFO nova.compute.manager [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Took 29.06 seconds to build instance. [ 858.632455] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394610, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.682184] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394615, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.709970] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394616, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.728012] env[61978]: DEBUG nova.network.neutron [-] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.830796] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394617, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.891666] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Releasing lock "refresh_cache-2084a365-b662-4564-b899-ab4c4a63f2b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.892041] env[61978]: DEBUG nova.compute.manager [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Instance network_info: |[{"id": "a9f70cfc-477a-44cc-8077-b23baa39cda7", "address": "fa:16:3e:6f:c4:46", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9f70cfc-47", "ovs_interfaceid": "a9f70cfc-477a-44cc-8077-b23baa39cda7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 858.892532] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:c4:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27138a4c-60c9-45fb-bf37-4c2f765315a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9f70cfc-477a-44cc-8077-b23baa39cda7', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 858.901913] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Creating folder: Project 
(67e9b6ecf8574e4da08a38921882b013). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 858.902554] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a1309d1f-43e9-42e9-a482-8e030dae0790 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.915893] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Created folder: Project (67e9b6ecf8574e4da08a38921882b013) in parent group-v295764. [ 858.916134] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Creating folder: Instances. Parent ref: group-v295797. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 858.916377] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e757e1d6-ea9a-4ef6-a512-81c30c06afd5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.929448] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Created folder: Instances in parent group-v295797. [ 858.929694] env[61978]: DEBUG oslo.service.loopingcall [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 858.929886] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 858.930108] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f11dc64-748b-451f-a8c4-1933f7c85fdc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.945346] env[61978]: DEBUG nova.network.neutron [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Updated VIF entry in instance network info cache for port 2daa968c-ac9c-4f15-ad2b-7977f5581ef1. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 858.945939] env[61978]: DEBUG nova.network.neutron [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Updating instance_info_cache with network_info: [{"id": "2daa968c-ac9c-4f15-ad2b-7977f5581ef1", "address": "fa:16:3e:0c:ea:2e", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2daa968c-ac", "ovs_interfaceid": "2daa968c-ac9c-4f15-ad2b-7977f5581ef1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.953669] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 858.953669] env[61978]: value = "task-1394620" [ 858.953669] env[61978]: _type = "Task" [ 858.953669] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.963242] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394620, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.974012] env[61978]: DEBUG nova.compute.utils [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 858.980022] env[61978]: DEBUG nova.compute.manager [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 858.980022] env[61978]: DEBUG nova.network.neutron [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 859.022709] env[61978]: DEBUG nova.policy [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd9c731900ec349ab945b14932491dc7a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'add5612301884f668bbe80681629e8d5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 859.083202] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520d7c63-81ed-9bcd-16dd-867a70b208af, 'name': SearchDatastore_Task, 'duration_secs': 0.014249} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.083975] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8724afd8-a98c-46e8-993d-7980b7ea92f6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.089934] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 859.089934] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f27da4-a204-fe67-f1fb-bc2e76f01d55" [ 859.089934] env[61978]: _type = "Task" [ 859.089934] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.100656] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f27da4-a204-fe67-f1fb-bc2e76f01d55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.121372] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394610, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.559256} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.122492] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 92eb5edb-803b-48d4-8c4f-338d7c3b3d13/92eb5edb-803b-48d4-8c4f-338d7c3b3d13.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 859.122836] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 859.123169] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ada81c92-241a-4c8f-a892-7f803bb10328 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.131470] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 859.131470] env[61978]: value = "task-1394621" [ 859.131470] env[61978]: _type = "Task" [ 859.131470] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.138337] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d145d789-b1c8-4409-9193-bfb144a4e7fc tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Lock "5d48e854-45fd-4767-91b7-100f84bdca55" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.580s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.149183] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394621, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.187043] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394615, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.209561] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394616, 'name': ReconfigVM_Task, 'duration_secs': 0.97992} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.211195] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Reconfigured VM instance instance-00000008 to attach disk [datastore1] b26a4784-698d-477a-8db7-58156899d231/b26a4784-698d-477a-8db7-58156899d231.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 859.213024] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e80670a6-7a5e-4fac-828a-225bfd945c4d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.222207] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 859.222207] env[61978]: value = "task-1394622" [ 859.222207] env[61978]: _type = "Task" [ 859.222207] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.233854] env[61978]: DEBUG oslo_concurrency.lockutils [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Acquiring lock "3ddf7322-5504-408f-af6c-af73fb1c4286" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.233854] env[61978]: DEBUG oslo_concurrency.lockutils [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Lock "3ddf7322-5504-408f-af6c-af73fb1c4286" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.233854] env[61978]: INFO nova.compute.manager [-] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Took 1.54 seconds to deallocate network for instance. [ 859.239029] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394622, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.335215] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394617, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.450996] env[61978]: DEBUG oslo_concurrency.lockutils [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] Releasing lock "refresh_cache-243e7146-46fc-43f4-a83b-cdc58f397f9e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.450996] env[61978]: DEBUG nova.compute.manager [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Received event network-vif-plugged-5cb15476-a062-4b08-8f77-6955b8086740 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 859.450996] env[61978]: DEBUG oslo_concurrency.lockutils [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] Acquiring lock "85fc5af8-454d-4042-841a-945b7e84eb6c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.450996] env[61978]: DEBUG oslo_concurrency.lockutils [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] Lock "85fc5af8-454d-4042-841a-945b7e84eb6c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.450996] env[61978]: DEBUG oslo_concurrency.lockutils [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] Lock "85fc5af8-454d-4042-841a-945b7e84eb6c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.451381] env[61978]: DEBUG nova.compute.manager [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] No waiting events found dispatching network-vif-plugged-5cb15476-a062-4b08-8f77-6955b8086740 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 859.451381] env[61978]: WARNING nova.compute.manager [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Received unexpected event network-vif-plugged-5cb15476-a062-4b08-8f77-6955b8086740 for instance with vm_state building and task_state spawning. [ 859.451381] env[61978]: DEBUG nova.compute.manager [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Received event network-changed-5cb15476-a062-4b08-8f77-6955b8086740 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 859.451381] env[61978]: DEBUG nova.compute.manager [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Refreshing instance network info cache due to event network-changed-5cb15476-a062-4b08-8f77-6955b8086740. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 859.451381] env[61978]: DEBUG oslo_concurrency.lockutils [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] Acquiring lock "refresh_cache-85fc5af8-454d-4042-841a-945b7e84eb6c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.451574] env[61978]: DEBUG oslo_concurrency.lockutils [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] Acquired lock "refresh_cache-85fc5af8-454d-4042-841a-945b7e84eb6c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.451574] env[61978]: DEBUG nova.network.neutron [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Refreshing network info cache for port 5cb15476-a062-4b08-8f77-6955b8086740 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 859.466055] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394620, 'name': CreateVM_Task, 'duration_secs': 0.381418} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.466055] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 859.466055] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.466247] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.466537] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 859.467167] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48e4493f-492e-4170-84e8-a6862fe66204 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.472661] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Waiting for the task: (returnval){ [ 859.472661] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5251c2ac-4a90-f2c7-25e2-e051c861069d" [ 859.472661] env[61978]: _type = "Task" [ 859.472661] env[61978]: } to 
complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.477573] env[61978]: DEBUG nova.compute.manager [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 859.490242] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5251c2ac-4a90-f2c7-25e2-e051c861069d, 'name': SearchDatastore_Task, 'duration_secs': 0.009912} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.490623] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.490872] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 859.491131] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.536318] env[61978]: DEBUG nova.network.neutron [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Successfully created port: 1d9ac1c2-acc1-4cef-8a1e-445797f69a52 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 859.585657] env[61978]: DEBUG nova.network.neutron [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Successfully updated port: 82595737-f96a-45c3-9bcc-2642e53bdaec {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 859.601897] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f27da4-a204-fe67-f1fb-bc2e76f01d55, 'name': SearchDatastore_Task, 'duration_secs': 0.011576} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.603203] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.603562] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 85fc5af8-454d-4042-841a-945b7e84eb6c/85fc5af8-454d-4042-841a-945b7e84eb6c.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 859.608084] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.608169] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 859.608940] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-811ee8b7-2405-4e61-bc50-df5cec7835b8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.611716] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-76c5d366-69a8-4f10-8c36-2567aca6211d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.626037] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 859.626037] env[61978]: value = "task-1394623" [ 859.626037] env[61978]: _type = "Task" [ 859.626037] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.626037] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 859.627537] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 859.630267] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92cd3c58-16fe-450e-af8f-8c21f84a73e7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.644428] env[61978]: DEBUG nova.compute.manager [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 859.648038] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394623, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.649181] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Waiting for the task: (returnval){ [ 859.649181] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]524e218e-336b-e377-5b16-a6e016079152" [ 859.649181] env[61978]: _type = "Task" [ 859.649181] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.657322] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394621, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.466427} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.657820] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 859.660993] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50904a5e-0b08-4da2-8db5-5f4c563ab5ad {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.668143] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]524e218e-336b-e377-5b16-a6e016079152, 'name': SearchDatastore_Task, 'duration_secs': 0.009617} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.669212] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1be78a69-a030-4554-bf33-9073ecc9df2c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.690254] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 92eb5edb-803b-48d4-8c4f-338d7c3b3d13/92eb5edb-803b-48d4-8c4f-338d7c3b3d13.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 859.699197] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0bdd6b09-fb93-44ba-976f-00f49cba77e7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.717732] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Waiting for the task: (returnval){ [ 859.717732] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52923ac9-71ad-76e2-3cfb-134985fc1842" [ 859.717732] env[61978]: _type = "Task" [ 859.717732] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.728133] env[61978]: DEBUG oslo_vmware.api [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394615, 'name': PowerOnVM_Task, 'duration_secs': 1.194762} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.735429] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 859.735773] env[61978]: DEBUG nova.compute.manager [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 859.736324] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 859.736324] env[61978]: value = "task-1394624" [ 859.736324] env[61978]: _type = "Task" [ 859.736324] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.741554] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd60a04-cf48-4561-93f7-210be060abbc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.753633] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.760078] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52923ac9-71ad-76e2-3cfb-134985fc1842, 'name': SearchDatastore_Task, 'duration_secs': 0.009256} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.769784] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.770273] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 2084a365-b662-4564-b899-ab4c4a63f2b9/2084a365-b662-4564-b899-ab4c4a63f2b9.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 859.772171] env[61978]: DEBUG nova.compute.manager [None req-0e1774ba-8d9d-4243-bcc5-afac81463522 tempest-ServerDiagnosticsTest-2008451569 tempest-ServerDiagnosticsTest-2008451569-project-admin] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 859.773291] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394622, 'name': Rename_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.778990] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1cd9a00a-7914-406e-b0e3-0eb056a1b686 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.782111] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f995ebbf-e4dc-4b9f-8717-8e4088ddd26c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.795424] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394624, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.798940] env[61978]: INFO nova.compute.manager [None req-0e1774ba-8d9d-4243-bcc5-afac81463522 tempest-ServerDiagnosticsTest-2008451569 tempest-ServerDiagnosticsTest-2008451569-project-admin] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Retrieving diagnostics [ 859.800022] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be1316e6-9781-4fc4-89d6-bb8fb3c16a9e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.804595] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Waiting for the task: (returnval){ [ 859.804595] env[61978]: value = "task-1394625" [ 859.804595] env[61978]: _type = "Task" [ 859.804595] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.852052] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Task: {'id': task-1394625, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.858265] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394617, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.011215] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908460ad-34f4-4fd9-85c9-ad04a73164e9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.021822] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa561591-d4fc-4f3e-aafb-d72a4f9b8bce {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.060860] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f348362-bfe5-427b-8f96-c32d93bf7840 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.070767] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c792fc04-4a6e-49b5-9e1a-e11e2aac0a98 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.088026] env[61978]: DEBUG nova.compute.provider_tree [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.091974] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Acquiring lock "refresh_cache-eb7cb200-c162-4e92-8916-6d9abd5cf34d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 860.092138] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Acquired lock "refresh_cache-eb7cb200-c162-4e92-8916-6d9abd5cf34d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.092297] env[61978]: DEBUG nova.network.neutron [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 860.136167] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394623, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501958} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.136431] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 85fc5af8-454d-4042-841a-945b7e84eb6c/85fc5af8-454d-4042-841a-945b7e84eb6c.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 860.136642] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 860.137252] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-11171fa8-2155-4a61-bdba-217a18f73166 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.145400] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 860.145400] env[61978]: value = "task-1394626" [ 860.145400] env[61978]: _type = "Task" [ 860.145400] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.161224] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394626, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.170983] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.240613] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394622, 'name': Rename_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.245800] env[61978]: DEBUG nova.network.neutron [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Updated VIF entry in instance network info cache for port 5cb15476-a062-4b08-8f77-6955b8086740. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 860.246031] env[61978]: DEBUG nova.network.neutron [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Updating instance_info_cache with network_info: [{"id": "5cb15476-a062-4b08-8f77-6955b8086740", "address": "fa:16:3e:d1:bb:14", "network": {"id": "bc368623-cfb2-43fb-a7d6-8dd238bd961d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1360202280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d7394d965f94155a34dd0ecc0957649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5cb15476-a0", "ovs_interfaceid": "5cb15476-a062-4b08-8f77-6955b8086740", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.269344] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394624, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.304064] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.317961] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Task: {'id': task-1394625, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.353463] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394617, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.499605] env[61978]: DEBUG nova.compute.manager [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 860.537099] env[61978]: DEBUG nova.virt.hardware [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T15:03:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1238508976',id=31,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1550618963',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 860.541027] env[61978]: DEBUG nova.virt.hardware [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 860.541027] env[61978]: DEBUG nova.virt.hardware [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 860.541027] env[61978]: DEBUG nova.virt.hardware [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 860.541027] env[61978]: DEBUG nova.virt.hardware [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 860.541027] env[61978]: DEBUG nova.virt.hardware [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 860.541440] env[61978]: DEBUG nova.virt.hardware [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 860.541440] env[61978]: DEBUG nova.virt.hardware [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 
tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 860.541440] env[61978]: DEBUG nova.virt.hardware [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 860.541440] env[61978]: DEBUG nova.virt.hardware [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 860.541440] env[61978]: DEBUG nova.virt.hardware [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 860.541685] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685186c0-24b7-4d80-83f2-a6709b555b3a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.550194] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99fe60b9-78ec-476a-8b21-61cc64724ed9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.595446] env[61978]: DEBUG nova.scheduler.client.report [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 860.654644] env[61978]: DEBUG nova.network.neutron [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 860.660515] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394626, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07009} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.661497] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 860.662474] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28da8371-4673-45db-9c3c-fdc2bcb879a1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.685675] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] 85fc5af8-454d-4042-841a-945b7e84eb6c/85fc5af8-454d-4042-841a-945b7e84eb6c.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 860.688448] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-814ece17-e751-484a-904c-28abd5336a96 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.712786] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 860.712786] env[61978]: value = "task-1394627" [ 860.712786] env[61978]: _type = "Task" [ 860.712786] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.724020] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394627, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.737857] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394622, 'name': Rename_Task, 'duration_secs': 1.278397} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.738121] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 860.738719] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c11f44f-8463-40f6-9cf1-02223de28aed {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.746684] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 860.746684] env[61978]: value = "task-1394628" [ 860.746684] env[61978]: _type = "Task" [ 860.746684] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.764224] env[61978]: DEBUG oslo_concurrency.lockutils [req-f783ab4f-c2d1-4842-b556-b17492975018 req-e8682da2-5b25-4c0a-a8a2-8d93a4882e7e service nova] Releasing lock "refresh_cache-85fc5af8-454d-4042-841a-945b7e84eb6c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.770467] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394628, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.774024] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394624, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.818688] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Task: {'id': task-1394625, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.744663} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.819351] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 2084a365-b662-4564-b899-ab4c4a63f2b9/2084a365-b662-4564-b899-ab4c4a63f2b9.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 860.819351] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 860.819511] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-de43b527-688b-4d0a-9543-0990232b5d61 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.825769] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Waiting for the task: (returnval){ [ 860.825769] env[61978]: value = "task-1394629" [ 860.825769] env[61978]: _type = "Task" [ 860.825769] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.834424] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Task: {'id': task-1394629, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.848764] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394617, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.936139] env[61978]: DEBUG nova.network.neutron [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Updating instance_info_cache with network_info: [{"id": "82595737-f96a-45c3-9bcc-2642e53bdaec", "address": "fa:16:3e:b5:82:28", "network": {"id": "05b0f51b-9661-4555-9763-fcc5acf38ace", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-393436776-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51fc76950ff44b69f26a6977be315bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc", "external-id": "nsx-vlan-transportzone-951", "segmentation_id": 951, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82595737-f9", "ovs_interfaceid": "82595737-f96a-45c3-9bcc-2642e53bdaec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.972344] env[61978]: DEBUG nova.compute.manager [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Received event network-vif-plugged-a9f70cfc-477a-44cc-8077-b23baa39cda7 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 860.972566] env[61978]: DEBUG oslo_concurrency.lockutils [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] Acquiring lock "2084a365-b662-4564-b899-ab4c4a63f2b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.972777] env[61978]: DEBUG oslo_concurrency.lockutils [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] Lock "2084a365-b662-4564-b899-ab4c4a63f2b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.972944] env[61978]: DEBUG oslo_concurrency.lockutils [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] Lock "2084a365-b662-4564-b899-ab4c4a63f2b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.973128] env[61978]: DEBUG nova.compute.manager [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] No waiting events found dispatching network-vif-plugged-a9f70cfc-477a-44cc-8077-b23baa39cda7 
{{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 860.973294] env[61978]: WARNING nova.compute.manager [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Received unexpected event network-vif-plugged-a9f70cfc-477a-44cc-8077-b23baa39cda7 for instance with vm_state building and task_state spawning. [ 860.973457] env[61978]: DEBUG nova.compute.manager [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Received event network-changed-a9f70cfc-477a-44cc-8077-b23baa39cda7 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 860.973610] env[61978]: DEBUG nova.compute.manager [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Refreshing instance network info cache due to event network-changed-a9f70cfc-477a-44cc-8077-b23baa39cda7. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 860.973807] env[61978]: DEBUG oslo_concurrency.lockutils [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] Acquiring lock "refresh_cache-2084a365-b662-4564-b899-ab4c4a63f2b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 860.973954] env[61978]: DEBUG oslo_concurrency.lockutils [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] Acquired lock "refresh_cache-2084a365-b662-4564-b899-ab4c4a63f2b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.974129] env[61978]: DEBUG nova.network.neutron [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Refreshing network info cache for port a9f70cfc-477a-44cc-8077-b23baa39cda7 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 861.106764] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.638s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.107583] env[61978]: DEBUG nova.compute.manager [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 861.113100] env[61978]: DEBUG oslo_concurrency.lockutils [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.206s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.116025] env[61978]: DEBUG nova.objects.instance [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lazy-loading 'resources' on Instance uuid 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 861.210504] env[61978]: DEBUG nova.network.neutron [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Successfully updated port: 1d9ac1c2-acc1-4cef-8a1e-445797f69a52 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 861.225900] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394627, 'name': ReconfigVM_Task, 'duration_secs': 0.308323} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.226200] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Reconfigured VM instance instance-0000000b to attach disk [datastore2] 85fc5af8-454d-4042-841a-945b7e84eb6c/85fc5af8-454d-4042-841a-945b7e84eb6c.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 861.226840] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bb03d654-810e-42b6-a326-15c91fa9c77c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.234245] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 861.234245] env[61978]: value = "task-1394630" [ 861.234245] env[61978]: _type = "Task" [ 861.234245] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.242657] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394630, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.263570] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394628, 'name': PowerOnVM_Task} progress is 37%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.271456] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394624, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.335542] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Task: {'id': task-1394629, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074283} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.335934] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 861.336849] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d651d7c3-dbe5-438e-a6f3-5fbd031c193e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.360720] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] 2084a365-b662-4564-b899-ab4c4a63f2b9/2084a365-b662-4564-b899-ab4c4a63f2b9.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 861.361897] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b94c8fed-4a62-4f8c-a09d-6ad345310e26 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.380645] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394617, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.385441] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Waiting for the task: (returnval){ [ 861.385441] env[61978]: value = "task-1394631" [ 861.385441] env[61978]: _type = "Task" [ 861.385441] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.395125] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Task: {'id': task-1394631, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.439149] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Releasing lock "refresh_cache-eb7cb200-c162-4e92-8916-6d9abd5cf34d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.439509] env[61978]: DEBUG nova.compute.manager [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Instance network_info: |[{"id": "82595737-f96a-45c3-9bcc-2642e53bdaec", "address": "fa:16:3e:b5:82:28", "network": {"id": "05b0f51b-9661-4555-9763-fcc5acf38ace", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-393436776-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51fc76950ff44b69f26a6977be315bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc", "external-id": "nsx-vlan-transportzone-951", "segmentation_id": 951, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82595737-f9", "ovs_interfaceid": "82595737-f96a-45c3-9bcc-2642e53bdaec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 861.439939] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:82:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '82595737-f96a-45c3-9bcc-2642e53bdaec', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 861.447604] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Creating folder: Project (b51fc76950ff44b69f26a6977be315bc). Parent ref: group-v295764. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 861.447949] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a7de77b-43fe-421f-8078-ef1420664d05 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.458572] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Created folder: Project (b51fc76950ff44b69f26a6977be315bc) in parent group-v295764. [ 861.458818] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Creating folder: Instances. Parent ref: group-v295800. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 861.459106] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-62d4dabf-46ac-404d-bd4e-7a675e3339e2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.467908] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Created folder: Instances in parent group-v295800. [ 861.468210] env[61978]: DEBUG oslo.service.loopingcall [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 861.468446] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 861.468681] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57bc0a4c-1f95-4bf7-9236-7b506cdc1daa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.490954] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 861.490954] env[61978]: value = "task-1394634" [ 861.490954] env[61978]: _type = "Task" [ 861.490954] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.498602] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394634, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.617176] env[61978]: DEBUG nova.compute.utils [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 861.618996] env[61978]: DEBUG nova.compute.manager [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 861.618996] env[61978]: DEBUG nova.network.neutron [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 861.628087] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Acquiring lock "5d48e854-45fd-4767-91b7-100f84bdca55" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.628087] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Lock "5d48e854-45fd-4767-91b7-100f84bdca55" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.628087] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Acquiring lock "5d48e854-45fd-4767-91b7-100f84bdca55-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.628087] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Lock "5d48e854-45fd-4767-91b7-100f84bdca55-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.628652] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Lock "5d48e854-45fd-4767-91b7-100f84bdca55-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.634042] env[61978]: INFO nova.compute.manager [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Terminating instance [ 861.637019] env[61978]: DEBUG nova.compute.manager [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 861.637292] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 861.643323] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cafa12c-36a9-47db-84cf-920ea8e78c82 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.657251] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 861.657251] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-20ee0ddf-ab36-4a23-883d-ed2b7508c1c8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.663316] env[61978]: DEBUG oslo_vmware.api [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Waiting for the task: (returnval){ [ 861.663316] env[61978]: value = "task-1394635" [ 861.663316] env[61978]: _type = "Task" [ 861.663316] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.684818] env[61978]: DEBUG oslo_vmware.api [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394635, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.702810] env[61978]: DEBUG nova.policy [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '74b99d5453e243ada5c84c82947dcba7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '00c674bbf1e945ba946d844f9856fdfc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 861.714621] env[61978]: DEBUG oslo_concurrency.lockutils [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquiring lock "refresh_cache-371ddf66-a39b-41c4-bbd1-2a1c1b99834e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.714912] env[61978]: DEBUG oslo_concurrency.lockutils [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquired lock "refresh_cache-371ddf66-a39b-41c4-bbd1-2a1c1b99834e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.714912] env[61978]: DEBUG nova.network.neutron [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 861.749456] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394630, 'name': Rename_Task, 'duration_secs': 0.179182} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.753608] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 861.759477] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f648388-980f-48e8-aba1-b27d04b6dcaf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.774685] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 861.774685] env[61978]: value = "task-1394636" [ 861.774685] env[61978]: _type = "Task" [ 861.774685] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.775130] env[61978]: DEBUG oslo_vmware.api [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394628, 'name': PowerOnVM_Task, 'duration_secs': 0.971591} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.779079] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 861.779079] env[61978]: INFO nova.compute.manager [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Took 16.17 seconds to spawn the instance on the hypervisor. [ 861.779205] env[61978]: DEBUG nova.compute.manager [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 861.786435] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c60395-c48d-4026-a409-912c1e75ed6d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.791856] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394624, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.799602] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394636, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.823364] env[61978]: DEBUG nova.network.neutron [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Updated VIF entry in instance network info cache for port a9f70cfc-477a-44cc-8077-b23baa39cda7. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 861.823735] env[61978]: DEBUG nova.network.neutron [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Updating instance_info_cache with network_info: [{"id": "a9f70cfc-477a-44cc-8077-b23baa39cda7", "address": "fa:16:3e:6f:c4:46", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9f70cfc-47", "ovs_interfaceid": "a9f70cfc-477a-44cc-8077-b23baa39cda7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.855141] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394617, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.906343] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Task: {'id': task-1394631, 'name': ReconfigVM_Task, 'duration_secs': 0.407856} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.906663] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Reconfigured VM instance instance-0000000c to attach disk [datastore2] 2084a365-b662-4564-b899-ab4c4a63f2b9/2084a365-b662-4564-b899-ab4c4a63f2b9.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 861.907815] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f4b54af-b408-4bd7-8dc4-4ca5d703cae7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.917983] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Waiting for the task: (returnval){ [ 861.917983] env[61978]: value = "task-1394637" [ 861.917983] env[61978]: _type = "Task" [ 861.917983] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.928111] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Task: {'id': task-1394637, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.004739] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394634, 'name': CreateVM_Task, 'duration_secs': 0.505212} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.008522] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 862.010240] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.010240] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.010482] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 862.012738] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a86b7dca-cf7e-4c6b-bcc4-a1c6258f7952 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.016552] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Waiting for the task: (returnval){ [ 862.016552] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ddda78-a239-cae5-bb2c-6ffe12f32c74" [ 862.016552] env[61978]: _type = "Task" [ 862.016552] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.028752] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ddda78-a239-cae5-bb2c-6ffe12f32c74, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.087611] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aacf92c-da7c-4b9c-9fd8-e9020827b803 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.096574] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cfb0292-beea-424f-ab53-4acfe3ef21f4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.132865] env[61978]: DEBUG nova.compute.manager [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 862.137255] env[61978]: DEBUG nova.network.neutron [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Successfully created port: 51f0ce4a-1710-4256-9ca8-ac173927565b {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 862.140145] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af5a1b2-534f-41e7-ac9f-8a105d57939a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.148621] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-730f0066-c22b-44eb-ab01-a81c6b2e2134 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.164084] env[61978]: DEBUG nova.compute.provider_tree [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.176427] env[61978]: DEBUG oslo_vmware.api [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394635, 'name': PowerOffVM_Task, 'duration_secs': 0.331361} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.176427] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 862.176427] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 862.176427] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf28b8b3-aa1a-4247-87b4-22679da48f78 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.273247] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394624, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.289931] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394636, 'name': PowerOnVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.297959] env[61978]: DEBUG nova.network.neutron [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 862.320598] env[61978]: INFO nova.compute.manager [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Took 31.48 seconds to build instance. 
[ 862.328408] env[61978]: DEBUG oslo_concurrency.lockutils [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] Releasing lock "refresh_cache-2084a365-b662-4564-b899-ab4c4a63f2b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.328890] env[61978]: DEBUG nova.compute.manager [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Received event network-vif-deleted-11772575-2be7-44fb-b865-5690433fd23d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 862.328985] env[61978]: DEBUG nova.compute.manager [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Received event network-vif-plugged-82595737-f96a-45c3-9bcc-2642e53bdaec {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 862.329179] env[61978]: DEBUG oslo_concurrency.lockutils [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] Acquiring lock "eb7cb200-c162-4e92-8916-6d9abd5cf34d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.329439] env[61978]: DEBUG oslo_concurrency.lockutils [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] Lock "eb7cb200-c162-4e92-8916-6d9abd5cf34d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.329656] env[61978]: DEBUG oslo_concurrency.lockutils [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] Lock "eb7cb200-c162-4e92-8916-6d9abd5cf34d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.329865] env[61978]: DEBUG nova.compute.manager [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] No waiting events found dispatching network-vif-plugged-82595737-f96a-45c3-9bcc-2642e53bdaec {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 862.330112] env[61978]: WARNING nova.compute.manager [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Received unexpected event network-vif-plugged-82595737-f96a-45c3-9bcc-2642e53bdaec for instance with vm_state building and task_state spawning. 
[ 862.330331] env[61978]: DEBUG nova.compute.manager [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Received event network-changed-82595737-f96a-45c3-9bcc-2642e53bdaec {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 862.330515] env[61978]: DEBUG nova.compute.manager [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Refreshing instance network info cache due to event network-changed-82595737-f96a-45c3-9bcc-2642e53bdaec. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 862.330738] env[61978]: DEBUG oslo_concurrency.lockutils [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] Acquiring lock "refresh_cache-eb7cb200-c162-4e92-8916-6d9abd5cf34d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.330909] env[61978]: DEBUG oslo_concurrency.lockutils [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] Acquired lock "refresh_cache-eb7cb200-c162-4e92-8916-6d9abd5cf34d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.331176] env[61978]: DEBUG nova.network.neutron [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Refreshing network info cache for port 82595737-f96a-45c3-9bcc-2642e53bdaec {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 862.351730] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394617, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.398847] env[61978]: INFO nova.compute.manager [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Rebuilding instance [ 862.435091] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Task: {'id': task-1394637, 'name': Rename_Task, 'duration_secs': 0.137852} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.439972] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 862.441410] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-39764ebb-9298-4ad2-9800-5c978be58246 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.449051] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Waiting for the task: (returnval){ [ 862.449051] env[61978]: value = "task-1394639" [ 862.449051] env[61978]: _type = "Task" [ 862.449051] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.457206] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Task: {'id': task-1394639, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.463083] env[61978]: DEBUG nova.compute.manager [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 862.464102] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf8a7cf-2173-474b-91f1-635a93d775bc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.528353] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ddda78-a239-cae5-bb2c-6ffe12f32c74, 'name': SearchDatastore_Task, 'duration_secs': 0.012478} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.528744] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.529404] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 862.529861] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.529964] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.530117] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 862.530662] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bdf77512-644e-4c67-af31-6a569291c80e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.540118] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 862.540301] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 862.541416] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-243ad3dd-c917-4749-b9ef-46be38ee84f2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.547401] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Waiting for the task: (returnval){ [ 862.547401] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5271e2be-cbd1-5ed8-6aad-4603dfdea4a7" [ 862.547401] env[61978]: _type = "Task" [ 862.547401] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.557216] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5271e2be-cbd1-5ed8-6aad-4603dfdea4a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.672834] env[61978]: DEBUG nova.scheduler.client.report [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 862.676958] env[61978]: DEBUG nova.network.neutron [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Updating instance_info_cache with network_info: [{"id": "1d9ac1c2-acc1-4cef-8a1e-445797f69a52", "address": "fa:16:3e:da:2b:8c", "network": {"id": "c16671c6-39f3-456e-b159-3af6a9553564", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1089542181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "add5612301884f668bbe80681629e8d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d9ac1c2-ac", "ovs_interfaceid": "1d9ac1c2-acc1-4cef-8a1e-445797f69a52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.774523] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394624, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.791888] env[61978]: DEBUG oslo_vmware.api [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394636, 'name': PowerOnVM_Task, 'duration_secs': 0.655825} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.791888] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 862.791888] env[61978]: INFO nova.compute.manager [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Took 9.79 seconds to spawn the instance on the hypervisor. [ 862.792091] env[61978]: DEBUG nova.compute.manager [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 862.793072] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49169c07-9096-46f7-8d85-7c5b3c058614 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.824277] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f3c53ea-52ce-4e1b-a2e9-023abce32112 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "b26a4784-698d-477a-8db7-58156899d231" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.005s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.854404] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394617, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.962051] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Task: {'id': task-1394639, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.976689] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 862.976896] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c87afa5c-c19d-49e4-9737-e47ad66a6f60 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.987571] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Waiting for the task: (returnval){ [ 862.987571] env[61978]: value = "task-1394640" [ 862.987571] env[61978]: _type = "Task" [ 862.987571] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.996792] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': task-1394640, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.060934] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5271e2be-cbd1-5ed8-6aad-4603dfdea4a7, 'name': SearchDatastore_Task, 'duration_secs': 0.0114} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.061973] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5705ba27-9758-4983-a788-75993d49745a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.066875] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Waiting for the task: (returnval){ [ 863.066875] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52fcbf29-2fcf-2ed2-cf59-6620a1e649b1" [ 863.066875] env[61978]: _type = "Task" [ 863.066875] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.075374] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52fcbf29-2fcf-2ed2-cf59-6620a1e649b1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.143528] env[61978]: DEBUG nova.compute.manager [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 863.188145] env[61978]: DEBUG nova.virt.hardware [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 863.188145] env[61978]: DEBUG nova.virt.hardware [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 863.188145] env[61978]: DEBUG nova.virt.hardware [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 863.188448] env[61978]: DEBUG nova.virt.hardware [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 863.188448] env[61978]: DEBUG nova.virt.hardware [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 863.188448] env[61978]: DEBUG nova.virt.hardware [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 863.188448] env[61978]: DEBUG nova.virt.hardware [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 863.188601] env[61978]: DEBUG nova.virt.hardware [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 863.189210] env[61978]: DEBUG nova.virt.hardware [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 863.189210] env[61978]: DEBUG nova.virt.hardware [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 863.189210] env[61978]: DEBUG nova.virt.hardware [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 863.194259] env[61978]: DEBUG oslo_concurrency.lockutils [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Releasing lock "refresh_cache-371ddf66-a39b-41c4-bbd1-2a1c1b99834e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.194259] env[61978]: DEBUG nova.compute.manager [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Instance network_info: |[{"id": "1d9ac1c2-acc1-4cef-8a1e-445797f69a52", "address": "fa:16:3e:da:2b:8c", "network": {"id": "c16671c6-39f3-456e-b159-3af6a9553564", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1089542181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "add5612301884f668bbe80681629e8d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d9ac1c2-ac", "ovs_interfaceid": "1d9ac1c2-acc1-4cef-8a1e-445797f69a52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 863.197313] env[61978]: 
DEBUG oslo_concurrency.lockutils [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.081s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.199794] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0423216c-2440-40c1-8b8a-5a0a2c5226d0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.203123] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:2b:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dced2f3d-7fd3-4a42-836d-9f02dab4c949', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1d9ac1c2-acc1-4cef-8a1e-445797f69a52', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 863.210460] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Creating folder: Project (add5612301884f668bbe80681629e8d5). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 863.211214] env[61978]: DEBUG oslo_concurrency.lockutils [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.564s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.212703] env[61978]: INFO nova.compute.claims [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 863.215703] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aefbaf9f-5579-40a8-9af2-2cdc11654763 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.230220] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dbd72cd-12ad-401a-b061-051541750085 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.236092] env[61978]: INFO nova.scheduler.client.report [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Deleted allocations for instance 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed [ 863.247838] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Created folder: Project 
(add5612301884f668bbe80681629e8d5) in parent group-v295764. [ 863.248090] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Creating folder: Instances. Parent ref: group-v295803. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 863.251752] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46589e1c-0b2c-42e5-bf09-187fed7dbc57 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.253524] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 863.253706] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 863.253894] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Deleting the datastore file [datastore1] 5d48e854-45fd-4767-91b7-100f84bdca55 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 863.254361] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ee052a28-834a-4756-8567-742694e973c0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.261542] env[61978]: DEBUG oslo_vmware.api [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Waiting for the task: (returnval){ [ 863.261542] env[61978]: value = "task-1394643" [ 863.261542] env[61978]: _type = "Task" [ 863.261542] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.267278] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Created folder: Instances in parent group-v295803. [ 863.267526] env[61978]: DEBUG oslo.service.loopingcall [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 863.274852] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 863.274852] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-020d1897-8558-4202-827e-553fef2e15d5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.292285] env[61978]: DEBUG oslo_vmware.api [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394643, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.298186] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394624, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.298482] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 863.298482] env[61978]: value = "task-1394644" [ 863.298482] env[61978]: _type = "Task" [ 863.298482] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.312611] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394644, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.315131] env[61978]: INFO nova.compute.manager [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Took 21.36 seconds to build instance. [ 863.326663] env[61978]: DEBUG nova.compute.manager [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 863.361791] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394617, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.470037] env[61978]: DEBUG oslo_vmware.api [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Task: {'id': task-1394639, 'name': PowerOnVM_Task, 'duration_secs': 0.573368} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.470348] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 863.470566] env[61978]: INFO nova.compute.manager [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Took 8.10 seconds to spawn the instance on the hypervisor. [ 863.470742] env[61978]: DEBUG nova.compute.manager [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 863.471573] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0629704-4f60-4bd4-a8b5-30c29cc957c7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.501532] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': task-1394640, 'name': PowerOffVM_Task, 'duration_secs': 0.160967} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.501869] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 863.502129] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 863.503532] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c20d7f2c-7a4b-4aca-8274-8817b736278b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.513438] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 863.513703] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-140d74d0-03be-44d9-93fb-8103e6ffcd57 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.540294] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 
tempest-ServersAdmin275Test-248156959-project-admin] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 863.540507] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 863.540897] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Deleting the datastore file [datastore1] 96a38ed0-c880-4f21-9389-99f039279072 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 863.540975] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e687507d-1f30-43f7-82d4-4471abfa792b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.550244] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Waiting for the task: (returnval){ [ 863.550244] env[61978]: value = "task-1394646" [ 863.550244] env[61978]: _type = "Task" [ 863.550244] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.563210] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': task-1394646, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.581800] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52fcbf29-2fcf-2ed2-cf59-6620a1e649b1, 'name': SearchDatastore_Task, 'duration_secs': 0.01396} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.581800] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.581800] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] eb7cb200-c162-4e92-8916-6d9abd5cf34d/eb7cb200-c162-4e92-8916-6d9abd5cf34d.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 863.581800] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-940028d7-1e67-41af-8266-7d49dae1a142 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.584545] env[61978]: DEBUG nova.network.neutron [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Updated VIF entry in instance network info cache for port 82595737-f96a-45c3-9bcc-2642e53bdaec. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 863.584545] env[61978]: DEBUG nova.network.neutron [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Updating instance_info_cache with network_info: [{"id": "82595737-f96a-45c3-9bcc-2642e53bdaec", "address": "fa:16:3e:b5:82:28", "network": {"id": "05b0f51b-9661-4555-9763-fcc5acf38ace", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-393436776-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51fc76950ff44b69f26a6977be315bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc", "external-id": "nsx-vlan-transportzone-951", "segmentation_id": 951, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82595737-f9", "ovs_interfaceid": "82595737-f96a-45c3-9bcc-2642e53bdaec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.591020] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Waiting for the task: (returnval){ [ 863.591020] env[61978]: value = 
"task-1394647" [ 863.591020] env[61978]: _type = "Task" [ 863.591020] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.600294] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': task-1394647, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.759629] env[61978]: DEBUG oslo_concurrency.lockutils [None req-476ee8e4-91eb-43d3-93db-3720b2fed698 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "892b03e7-a9fc-4b53-bffd-d8b090cbb9ed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.454s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.777246] env[61978]: DEBUG oslo_vmware.api [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394643, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.786206] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394624, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.810698] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394644, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.816761] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3b96dc4-3021-4e0b-9a1c-a6f1f69467fd tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "85fc5af8-454d-4042-841a-945b7e84eb6c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.945s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.852381] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.858907] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394617, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.991663] env[61978]: INFO nova.compute.manager [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Took 21.07 seconds to build instance. 
[ 864.064636] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': task-1394646, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.087957] env[61978]: DEBUG oslo_concurrency.lockutils [req-4abe3ddf-1c5e-4e9e-86c6-fe8f096ccce3 req-d807b08b-fb9f-4938-9319-3024449c6251 service nova] Releasing lock "refresh_cache-eb7cb200-c162-4e92-8916-6d9abd5cf34d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.108131] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': task-1394647, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.288950] env[61978]: DEBUG oslo_vmware.api [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394643, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.288950] env[61978]: DEBUG nova.network.neutron [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Successfully updated port: 51f0ce4a-1710-4256-9ca8-ac173927565b {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 864.300778] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394624, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.311792] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394644, 'name': CreateVM_Task, 'duration_secs': 0.583097} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.312032] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 864.313319] env[61978]: DEBUG oslo_concurrency.lockutils [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.313537] env[61978]: DEBUG oslo_concurrency.lockutils [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.317481] env[61978]: DEBUG oslo_concurrency.lockutils [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 864.317717] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c32ee1d-12eb-4ab2-b77a-67a3292130f8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.322391] env[61978]: DEBUG nova.compute.manager [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 864.329577] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 864.329577] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]521d8f4d-2fe9-0cd6-d467-f46d131445f9" [ 864.329577] env[61978]: _type = "Task" [ 864.329577] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.340765] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]521d8f4d-2fe9-0cd6-d467-f46d131445f9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.363832] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394617, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.445839] env[61978]: DEBUG nova.compute.manager [req-ae38dc2e-89a8-40dc-b12e-44c9e3e4a5b1 req-15578cb0-37c3-4f7f-acc8-27ba803e7193 service nova] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Received event network-vif-plugged-1d9ac1c2-acc1-4cef-8a1e-445797f69a52 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 864.446484] env[61978]: DEBUG oslo_concurrency.lockutils [req-ae38dc2e-89a8-40dc-b12e-44c9e3e4a5b1 req-15578cb0-37c3-4f7f-acc8-27ba803e7193 service nova] Acquiring lock "371ddf66-a39b-41c4-bbd1-2a1c1b99834e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.447987] env[61978]: DEBUG oslo_concurrency.lockutils [req-ae38dc2e-89a8-40dc-b12e-44c9e3e4a5b1 req-15578cb0-37c3-4f7f-acc8-27ba803e7193 service nova] Lock "371ddf66-a39b-41c4-bbd1-2a1c1b99834e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.447987] env[61978]: DEBUG oslo_concurrency.lockutils [req-ae38dc2e-89a8-40dc-b12e-44c9e3e4a5b1 req-15578cb0-37c3-4f7f-acc8-27ba803e7193 service nova] Lock "371ddf66-a39b-41c4-bbd1-2a1c1b99834e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.447987] env[61978]: DEBUG nova.compute.manager [req-ae38dc2e-89a8-40dc-b12e-44c9e3e4a5b1 req-15578cb0-37c3-4f7f-acc8-27ba803e7193 service nova] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] No waiting events found dispatching network-vif-plugged-1d9ac1c2-acc1-4cef-8a1e-445797f69a52 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 864.447987] env[61978]: WARNING nova.compute.manager [req-ae38dc2e-89a8-40dc-b12e-44c9e3e4a5b1 req-15578cb0-37c3-4f7f-acc8-27ba803e7193 service nova] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Received unexpected event network-vif-plugged-1d9ac1c2-acc1-4cef-8a1e-445797f69a52 for instance with vm_state building and task_state spawning. [ 864.447987] env[61978]: DEBUG nova.compute.manager [req-ae38dc2e-89a8-40dc-b12e-44c9e3e4a5b1 req-15578cb0-37c3-4f7f-acc8-27ba803e7193 service nova] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Received event network-changed-1d9ac1c2-acc1-4cef-8a1e-445797f69a52 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 864.448852] env[61978]: DEBUG nova.compute.manager [req-ae38dc2e-89a8-40dc-b12e-44c9e3e4a5b1 req-15578cb0-37c3-4f7f-acc8-27ba803e7193 service nova] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Refreshing instance network info cache due to event network-changed-1d9ac1c2-acc1-4cef-8a1e-445797f69a52. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 864.448852] env[61978]: DEBUG oslo_concurrency.lockutils [req-ae38dc2e-89a8-40dc-b12e-44c9e3e4a5b1 req-15578cb0-37c3-4f7f-acc8-27ba803e7193 service nova] Acquiring lock "refresh_cache-371ddf66-a39b-41c4-bbd1-2a1c1b99834e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.448852] env[61978]: DEBUG oslo_concurrency.lockutils [req-ae38dc2e-89a8-40dc-b12e-44c9e3e4a5b1 req-15578cb0-37c3-4f7f-acc8-27ba803e7193 service nova] Acquired lock "refresh_cache-371ddf66-a39b-41c4-bbd1-2a1c1b99834e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.448852] env[61978]: DEBUG nova.network.neutron [req-ae38dc2e-89a8-40dc-b12e-44c9e3e4a5b1 req-15578cb0-37c3-4f7f-acc8-27ba803e7193 service nova] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Refreshing network info cache for port 1d9ac1c2-acc1-4cef-8a1e-445797f69a52 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 864.493535] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8391095f-d1d6-4445-9097-36cd8ec02d92 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Lock "2084a365-b662-4564-b899-ab4c4a63f2b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.296s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.570372] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': task-1394646, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.951168} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.571249] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 864.571519] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 864.571755] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 864.607759] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': task-1394647, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.678369} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.610018] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] eb7cb200-c162-4e92-8916-6d9abd5cf34d/eb7cb200-c162-4e92-8916-6d9abd5cf34d.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 864.610018] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 864.610018] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b40cccd3-4d6d-4b12-bd69-60e4abc25051 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.616212] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Waiting for the task: (returnval){ [ 864.616212] env[61978]: value = "task-1394648" [ 864.616212] env[61978]: _type = "Task" [ 864.616212] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.632874] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': task-1394648, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.691308] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2ab034-c8d0-4881-92bc-a76d4f649ef2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.701214] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a593028c-3438-4db4-82ef-55b853da37ac {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.733062] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef70228-b022-4e32-aae7-7445aac6bec6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.741886] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7b61f2-9de1-45db-b688-5382c14b72ba {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.758637] env[61978]: DEBUG nova.compute.provider_tree [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.775164] env[61978]: DEBUG oslo_vmware.api [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Task: {'id': task-1394643, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.302} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.783922] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 864.783922] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 864.783922] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 864.784117] env[61978]: INFO nova.compute.manager [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Took 3.15 seconds to destroy the instance on the hypervisor. 
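Every vSphere operation recorded in this section (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, DeleteDatastoreFile_Task, ReconfigVM_Task, PowerOnVM_Task) follows the same shape: the driver invokes a method, gets back a task handle, and oslo.vmware's wait_for_task/_poll_task (api.py:397/434/444 in the lines above) polls that handle, logging "progress is N%" until the task reports "completed successfully". The following is a minimal, self-contained sketch of that poll loop in plain Python; get_task_info and TaskPollError are hypothetical stand-ins, not the real oslo.vmware API.

```python
# Conceptual sketch only: a simplified poll loop in the spirit of the
# wait_for_task/_poll_task lines above (api.py:397/434/444). The helper
# get_task_info() and the TaskPollError exception are hypothetical names,
# not oslo.vmware APIs.
import time


class TaskPollError(Exception):
    pass


def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300):
    """Poll a vSphere-style task until it reaches a terminal state.

    get_task_info(task_id) is assumed to return an object with .state
    ('queued' | 'running' | 'success' | 'error') and .progress (0-100),
    mirroring the "progress is N%" / "completed successfully" log lines.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        if info.state == "success":
            return info                      # "completed successfully"
        if info.state == "error":
            raise TaskPollError(f"task {task_id} failed: {info}")
        # Still queued/running: report progress and sleep, as _poll_task does.
        print(f"Task {task_id} progress is {info.progress}%")
        time.sleep(poll_interval)
    raise TaskPollError(f"timed out waiting for task {task_id}")
```

The oslo.service.loopingcall entries in the surrounding records ("Waiting for function ... to return") suggest the production code drives this kind of loop through a looping call rather than a bare sleep, but the control flow captured in the log is the same: poll, log progress, return on success, raise on error.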
[ 864.784461] env[61978]: DEBUG oslo.service.loopingcall [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 864.785111] env[61978]: DEBUG nova.compute.manager [-] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 864.785241] env[61978]: DEBUG nova.network.neutron [-] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 864.794129] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquiring lock "refresh_cache-a0762952-2afd-448a-8e46-ba788a4ca131" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.794206] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquired lock "refresh_cache-a0762952-2afd-448a-8e46-ba788a4ca131" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.794314] env[61978]: DEBUG nova.network.neutron [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 864.795832] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394624, 'name': ReconfigVM_Task, 'duration_secs': 4.838776} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.796314] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 92eb5edb-803b-48d4-8c4f-338d7c3b3d13/92eb5edb-803b-48d4-8c4f-338d7c3b3d13.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 864.797051] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df42bc60-416d-4c26-b99b-78cc8b19812b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.807249] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 864.807249] env[61978]: value = "task-1394649" [ 864.807249] env[61978]: _type = "Task" [ 864.807249] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.820523] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394649, 'name': Rename_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.848804] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]521d8f4d-2fe9-0cd6-d467-f46d131445f9, 'name': SearchDatastore_Task, 'duration_secs': 0.012438} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.848804] env[61978]: DEBUG oslo_concurrency.lockutils [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.848804] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 864.848804] env[61978]: DEBUG oslo_concurrency.lockutils [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.848984] env[61978]: DEBUG oslo_concurrency.lockutils [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.850383] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 864.851821] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d37ac47a-f869-45cf-a4d6-0785f779b2ec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.870999] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394617, 'name': CopyVirtualDisk_Task, 'duration_secs': 6.081525} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.872038] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.873085] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 243e7146-46fc-43f4-a83b-cdc58f397f9e/243e7146-46fc-43f4-a83b-cdc58f397f9e.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 864.873231] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 864.873509] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 864.874188] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 864.874462] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a4ad0b8a-5a77-4d28-8160-a6af7f1cbf72 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.876431] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8e33dbb-8a5a-4b9a-ac9e-36ff258f366c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.882051] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 864.882051] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]527ba1f6-7b40-fafc-0929-91b4d050b39c" [ 864.882051] env[61978]: _type = "Task" [ 864.882051] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.883463] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 864.883463] env[61978]: value = "task-1394650" [ 864.883463] env[61978]: _type = "Task" [ 864.883463] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.901942] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]527ba1f6-7b40-fafc-0929-91b4d050b39c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.902187] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394650, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.003969] env[61978]: DEBUG nova.compute.manager [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 865.135115] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': task-1394648, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.164941} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.135368] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 865.136729] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43f8596-e4cd-4a35-8378-6e4ef13e190c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.175757] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] eb7cb200-c162-4e92-8916-6d9abd5cf34d/eb7cb200-c162-4e92-8916-6d9abd5cf34d.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 865.176657] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4782ae33-044d-4740-878d-adb469b3b3dd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.200737] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Waiting for the task: (returnval){ [ 865.200737] env[61978]: value = "task-1394651" [ 865.200737] env[61978]: _type = "Task" [ 865.200737] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.213505] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': task-1394651, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.262951] env[61978]: DEBUG nova.scheduler.client.report [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 865.322904] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394649, 'name': Rename_Task, 'duration_secs': 0.340818} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.323214] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 865.324104] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd5666e1-c97e-44e5-ac9d-b9fc5115e64c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.332968] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 865.332968] env[61978]: value = "task-1394653" [ 865.332968] env[61978]: _type = "Task" [ 865.332968] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.341311] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394653, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.360844] env[61978]: DEBUG nova.network.neutron [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 865.402757] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]527ba1f6-7b40-fafc-0929-91b4d050b39c, 'name': SearchDatastore_Task, 'duration_secs': 0.02173} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.406883] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394650, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068587} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.407024] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff1a5ea4-619f-4a14-b07e-50d2c60a7d34 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.409847] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 865.410886] env[61978]: DEBUG nova.network.neutron [req-ae38dc2e-89a8-40dc-b12e-44c9e3e4a5b1 req-15578cb0-37c3-4f7f-acc8-27ba803e7193 service nova] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Updated VIF entry in instance network info cache for port 1d9ac1c2-acc1-4cef-8a1e-445797f69a52. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 865.411533] env[61978]: DEBUG nova.network.neutron [req-ae38dc2e-89a8-40dc-b12e-44c9e3e4a5b1 req-15578cb0-37c3-4f7f-acc8-27ba803e7193 service nova] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Updating instance_info_cache with network_info: [{"id": "1d9ac1c2-acc1-4cef-8a1e-445797f69a52", "address": "fa:16:3e:da:2b:8c", "network": {"id": "c16671c6-39f3-456e-b159-3af6a9553564", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1089542181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "add5612301884f668bbe80681629e8d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d9ac1c2-ac", "ovs_interfaceid": "1d9ac1c2-acc1-4cef-8a1e-445797f69a52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.413397] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe7437f-462c-48bc-9e8d-59effd5a8a63 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.419547] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 865.419547] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a6b801-fda4-b98e-e767-c10e68414734" [ 865.419547] env[61978]: _type = "Task" [ 865.419547] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.445998] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] 243e7146-46fc-43f4-a83b-cdc58f397f9e/243e7146-46fc-43f4-a83b-cdc58f397f9e.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 865.450513] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1db9db6c-6d36-4c67-bfb5-d0d37da9ac3d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.477858] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a6b801-fda4-b98e-e767-c10e68414734, 'name': SearchDatastore_Task, 'duration_secs': 0.026493} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.480432] env[61978]: DEBUG oslo_concurrency.lockutils [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.480741] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 371ddf66-a39b-41c4-bbd1-2a1c1b99834e/371ddf66-a39b-41c4-bbd1-2a1c1b99834e.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 865.481195] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 865.481195] env[61978]: value = "task-1394657" [ 865.481195] env[61978]: _type = "Task" [ 865.481195] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.485299] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-24fa14ec-5e49-49b5-940c-1e332c5983a5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.494687] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394657, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.497137] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 865.497137] env[61978]: value = "task-1394658" [ 865.497137] env[61978]: _type = "Task" [ 865.497137] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.510455] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1394658, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.530047] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.599974] env[61978]: DEBUG nova.network.neutron [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Updating instance_info_cache with network_info: [{"id": "51f0ce4a-1710-4256-9ca8-ac173927565b", "address": "fa:16:3e:8e:77:cc", "network": {"id": "4b7af29e-cc4a-4765-8d05-6adf88573ad3", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1006801081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00c674bbf1e945ba946d844f9856fdfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51f0ce4a-17", "ovs_interfaceid": "51f0ce4a-1710-4256-9ca8-ac173927565b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.631948] env[61978]: DEBUG nova.virt.hardware [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 865.633077] env[61978]: DEBUG nova.virt.hardware [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 865.633077] env[61978]: DEBUG nova.virt.hardware [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 865.633077] env[61978]: DEBUG nova.virt.hardware [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 865.633077] env[61978]: DEBUG nova.virt.hardware [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 865.633077] env[61978]: DEBUG nova.virt.hardware [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 865.633365] env[61978]: DEBUG nova.virt.hardware [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 865.633406] env[61978]: DEBUG nova.virt.hardware [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 865.633622] env[61978]: DEBUG nova.virt.hardware [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 865.634361] env[61978]: DEBUG nova.virt.hardware [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 865.634361] env[61978]: DEBUG nova.virt.hardware [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d 
tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 865.635436] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b24cf40-5fdc-43ce-b134-c681e269b7bc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.645830] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366cddf8-7b9b-47e7-b22a-123db2638464 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.660727] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Instance VIF info [] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 865.666943] env[61978]: DEBUG oslo.service.loopingcall [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 865.667430] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 865.667560] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b2f3e72-56a1-41db-92f8-4affda3e79a4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.687691] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 865.687691] env[61978]: value = "task-1394659" [ 865.687691] env[61978]: _type = "Task" [ 865.687691] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.697349] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394659, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.711491] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': task-1394651, 'name': ReconfigVM_Task, 'duration_secs': 0.274294} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.711926] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Reconfigured VM instance instance-0000000d to attach disk [datastore2] eb7cb200-c162-4e92-8916-6d9abd5cf34d/eb7cb200-c162-4e92-8916-6d9abd5cf34d.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 865.712781] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-05376ae5-428e-4120-bb6c-426bf458f487 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.720260] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Waiting for the task: (returnval){ [ 865.720260] env[61978]: value = "task-1394660" [ 865.720260] env[61978]: _type = "Task" [ 865.720260] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.732997] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': task-1394660, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.768657] env[61978]: DEBUG oslo_concurrency.lockutils [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.557s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.768657] env[61978]: DEBUG nova.compute.manager [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 865.771638] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.269s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.771783] env[61978]: DEBUG nova.objects.instance [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] Lazy-loading 'resources' on Instance uuid e249c706-3196-4593-ae96-53f2619e0243 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 865.846851] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394653, 'name': PowerOnVM_Task} progress is 1%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.858718] env[61978]: DEBUG nova.network.neutron [-] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.917507] env[61978]: DEBUG oslo_concurrency.lockutils [req-ae38dc2e-89a8-40dc-b12e-44c9e3e4a5b1 req-15578cb0-37c3-4f7f-acc8-27ba803e7193 service nova] Releasing lock "refresh_cache-371ddf66-a39b-41c4-bbd1-2a1c1b99834e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.928381] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "76dff032-a806-4910-a48b-8850b05131c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.928951] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "76dff032-a806-4910-a48b-8850b05131c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.997622] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394657, 'name': ReconfigVM_Task, 'duration_secs': 0.424292} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.997841] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Reconfigured VM instance instance-0000000a to attach disk [datastore1] 243e7146-46fc-43f4-a83b-cdc58f397f9e/243e7146-46fc-43f4-a83b-cdc58f397f9e.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 866.003018] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a6073d15-b126-4570-b4be-8ad7997517c5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.013477] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1394658, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.015709] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 866.015709] env[61978]: value = "task-1394661" [ 866.015709] env[61978]: _type = "Task" [ 866.015709] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.027689] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394661, 'name': Rename_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.106254] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Releasing lock "refresh_cache-a0762952-2afd-448a-8e46-ba788a4ca131" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.106254] env[61978]: DEBUG nova.compute.manager [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Instance network_info: |[{"id": "51f0ce4a-1710-4256-9ca8-ac173927565b", "address": "fa:16:3e:8e:77:cc", "network": {"id": "4b7af29e-cc4a-4765-8d05-6adf88573ad3", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1006801081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00c674bbf1e945ba946d844f9856fdfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51f0ce4a-17", "ovs_interfaceid": "51f0ce4a-1710-4256-9ca8-ac173927565b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 866.107117] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:77:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b67e519-46cf-44ce-b670-4ba4c0c5b658', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '51f0ce4a-1710-4256-9ca8-ac173927565b', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 866.114900] env[61978]: DEBUG oslo.service.loopingcall [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 866.114900] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 866.114900] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7294f6a7-e465-43e7-bf60-757704ec1060 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.138697] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 866.138697] env[61978]: value = "task-1394662" [ 866.138697] env[61978]: _type = "Task" [ 866.138697] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.152415] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394662, 'name': CreateVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.200925] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394659, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.234464] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': task-1394660, 'name': Rename_Task, 'duration_secs': 0.325668} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.234464] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 866.234554] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc6d1a66-e942-486d-b54e-502fee1c02f2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.243445] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Waiting for the task: (returnval){ [ 866.243445] env[61978]: value = "task-1394663" [ 866.243445] env[61978]: _type = "Task" [ 866.243445] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.248653] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': task-1394663, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.276597] env[61978]: DEBUG nova.compute.utils [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 866.283880] env[61978]: DEBUG nova.compute.manager [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 866.284118] env[61978]: DEBUG nova.network.neutron [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 866.352325] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394653, 'name': PowerOnVM_Task} progress is 82%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.364125] env[61978]: INFO nova.compute.manager [-] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Took 1.58 seconds to deallocate network for instance. [ 866.366562] env[61978]: DEBUG nova.policy [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a2bd727a5c54823a318eaf1e88f0cf4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5c777a4c08854c61beda5832aa015a8d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 866.521899] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1394658, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.596776} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.527796] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 371ddf66-a39b-41c4-bbd1-2a1c1b99834e/371ddf66-a39b-41c4-bbd1-2a1c1b99834e.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 866.527796] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 866.532020] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b949c7ae-5f7e-4182-9093-597ab8af06fd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.535883] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394661, 'name': Rename_Task, 'duration_secs': 0.224245} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.536159] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 866.536393] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f05cab28-a2c5-44b6-bf85-f05d977f96ba {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.539517] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 866.539517] env[61978]: value = "task-1394664" [ 866.539517] env[61978]: _type = "Task" [ 866.539517] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.548751] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 866.548751] env[61978]: value = "task-1394665" [ 866.548751] env[61978]: _type = "Task" [ 866.548751] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.555260] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1394664, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.561830] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394665, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.648954] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394662, 'name': CreateVM_Task, 'duration_secs': 0.461761} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.655033] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 866.655033] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.655033] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.655033] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 866.655033] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd7e7630-dac8-4df4-bbdb-3f575f3c1878 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.659561] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for the task: (returnval){ [ 866.659561] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52def737-8aa2-7966-6796-da0f497f6e2d" [ 866.659561] env[61978]: _type = "Task" [ 866.659561] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.676290] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52def737-8aa2-7966-6796-da0f497f6e2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.705513] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394659, 'name': CreateVM_Task, 'duration_secs': 0.651715} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.705792] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 866.706376] env[61978]: DEBUG oslo_concurrency.lockutils [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.711718] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c568e11-1791-439d-8055-888337aca738 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.721952] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684d7a9e-6497-44fb-aa1e-b58d93986f0a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.761934] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd862065-3fc1-43e1-b0f1-6046788f8c77 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.770662] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': task-1394663, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.774072] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf34921a-86c0-4add-b436-5e1c4871d8e1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.790262] env[61978]: DEBUG nova.compute.manager [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 866.793590] env[61978]: DEBUG nova.compute.provider_tree [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.844707] env[61978]: DEBUG nova.network.neutron [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Successfully created port: c8d18564-3f86-41ec-88ac-735b63415259 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 866.858376] env[61978]: DEBUG oslo_vmware.api [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394653, 'name': PowerOnVM_Task, 'duration_secs': 1.174992} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.858687] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 866.858977] env[61978]: INFO nova.compute.manager [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Took 18.86 seconds to spawn the instance on the hypervisor. [ 866.859249] env[61978]: DEBUG nova.compute.manager [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 866.860786] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5995614e-f085-4abf-9362-53666c24d181 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.880050] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.051096] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1394664, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.141296} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.054824] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 867.055664] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4706f5ea-206e-42a8-b793-713aa5b91cac {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.065074] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394665, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.085917] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Reconfiguring VM instance instance-0000000e to attach disk [datastore2] 371ddf66-a39b-41c4-bbd1-2a1c1b99834e/371ddf66-a39b-41c4-bbd1-2a1c1b99834e.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 867.086265] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b5d5412-2b10-4ddb-9e32-0a60d0d5d22d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.107811] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 867.107811] env[61978]: value = "task-1394666" [ 867.107811] env[61978]: _type = "Task" [ 867.107811] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.117842] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1394666, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.173833] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52def737-8aa2-7966-6796-da0f497f6e2d, 'name': SearchDatastore_Task, 'duration_secs': 0.012127} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.174118] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.174320] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 867.175671] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.175671] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.175671] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 867.175671] env[61978]: DEBUG oslo_concurrency.lockutils [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.175926] env[61978]: DEBUG oslo_concurrency.lockutils [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 867.176350] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d150768-4811-4c88-8873-c710b47be9c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.178827] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ae76faf-9be3-4b56-875b-0b423b56bf7e {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.184530] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Waiting for the task: (returnval){ [ 867.184530] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f9e8c9-4814-e754-ff73-94caa2d603fe" [ 867.184530] env[61978]: _type = "Task" [ 867.184530] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.190410] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 867.190562] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 867.195256] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f411f18-2941-4fe7-8dc9-143975a0b51d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.199221] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f9e8c9-4814-e754-ff73-94caa2d603fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.203136] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for the task: (returnval){ [ 867.203136] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52856a9b-b545-6980-6f48-262bed75c8ae" [ 867.203136] env[61978]: _type = "Task" [ 867.203136] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.213919] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52856a9b-b545-6980-6f48-262bed75c8ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.266870] env[61978]: DEBUG oslo_vmware.api [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': task-1394663, 'name': PowerOnVM_Task, 'duration_secs': 0.653282} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.267228] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 867.267465] env[61978]: INFO nova.compute.manager [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Took 9.31 seconds to spawn the instance on the hypervisor. [ 867.267718] env[61978]: DEBUG nova.compute.manager [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 867.268435] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667ef7a5-e7e4-4a15-aab1-91698add6a5b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.299561] env[61978]: DEBUG nova.scheduler.client.report [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 867.337478] env[61978]: DEBUG nova.compute.manager [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Stashing vm_state: active {{(pid=61978) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 867.387260] env[61978]: INFO nova.compute.manager [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Took 35.10 seconds to build instance. [ 867.566665] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394665, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.617905] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1394666, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.647323] env[61978]: DEBUG nova.compute.manager [req-a06cd2c2-a15f-422c-9e14-82f6d595b405 req-0b7987c8-5690-4c16-b9fe-a8581585fccf service nova] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Received event network-vif-plugged-51f0ce4a-1710-4256-9ca8-ac173927565b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 867.647480] env[61978]: DEBUG oslo_concurrency.lockutils [req-a06cd2c2-a15f-422c-9e14-82f6d595b405 req-0b7987c8-5690-4c16-b9fe-a8581585fccf service nova] Acquiring lock "a0762952-2afd-448a-8e46-ba788a4ca131-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.648681] env[61978]: DEBUG oslo_concurrency.lockutils [req-a06cd2c2-a15f-422c-9e14-82f6d595b405 req-0b7987c8-5690-4c16-b9fe-a8581585fccf service nova] Lock "a0762952-2afd-448a-8e46-ba788a4ca131-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.648681] env[61978]: DEBUG oslo_concurrency.lockutils [req-a06cd2c2-a15f-422c-9e14-82f6d595b405 req-0b7987c8-5690-4c16-b9fe-a8581585fccf service nova] Lock "a0762952-2afd-448a-8e46-ba788a4ca131-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.648681] env[61978]: DEBUG nova.compute.manager [req-a06cd2c2-a15f-422c-9e14-82f6d595b405 req-0b7987c8-5690-4c16-b9fe-a8581585fccf service nova] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] No waiting events found dispatching network-vif-plugged-51f0ce4a-1710-4256-9ca8-ac173927565b {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 867.648681] env[61978]: WARNING nova.compute.manager [req-a06cd2c2-a15f-422c-9e14-82f6d595b405 req-0b7987c8-5690-4c16-b9fe-a8581585fccf service nova] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Received unexpected event network-vif-plugged-51f0ce4a-1710-4256-9ca8-ac173927565b for instance with vm_state building and task_state spawning. [ 867.648681] env[61978]: DEBUG nova.compute.manager [req-a06cd2c2-a15f-422c-9e14-82f6d595b405 req-0b7987c8-5690-4c16-b9fe-a8581585fccf service nova] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Received event network-changed-51f0ce4a-1710-4256-9ca8-ac173927565b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 867.649193] env[61978]: DEBUG nova.compute.manager [req-a06cd2c2-a15f-422c-9e14-82f6d595b405 req-0b7987c8-5690-4c16-b9fe-a8581585fccf service nova] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Refreshing instance network info cache due to event network-changed-51f0ce4a-1710-4256-9ca8-ac173927565b. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 867.649193] env[61978]: DEBUG oslo_concurrency.lockutils [req-a06cd2c2-a15f-422c-9e14-82f6d595b405 req-0b7987c8-5690-4c16-b9fe-a8581585fccf service nova] Acquiring lock "refresh_cache-a0762952-2afd-448a-8e46-ba788a4ca131" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.649193] env[61978]: DEBUG oslo_concurrency.lockutils [req-a06cd2c2-a15f-422c-9e14-82f6d595b405 req-0b7987c8-5690-4c16-b9fe-a8581585fccf service nova] Acquired lock "refresh_cache-a0762952-2afd-448a-8e46-ba788a4ca131" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.649328] env[61978]: DEBUG nova.network.neutron [req-a06cd2c2-a15f-422c-9e14-82f6d595b405 req-0b7987c8-5690-4c16-b9fe-a8581585fccf service nova] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Refreshing network info cache for port 51f0ce4a-1710-4256-9ca8-ac173927565b {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 867.697176] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f9e8c9-4814-e754-ff73-94caa2d603fe, 'name': SearchDatastore_Task, 'duration_secs': 0.045357} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.697677] env[61978]: DEBUG oslo_concurrency.lockutils [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.697781] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 867.698024] env[61978]: DEBUG oslo_concurrency.lockutils [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.717024] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52856a9b-b545-6980-6f48-262bed75c8ae, 'name': SearchDatastore_Task, 'duration_secs': 0.035447} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.717024] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-055911c8-80e8-481c-b91f-aa7278c3e951 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.723213] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for the task: (returnval){ [ 867.723213] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]524ddeed-d8b2-b8c1-24ed-e5bf07742642" [ 867.723213] env[61978]: _type = "Task" [ 867.723213] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.733477] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]524ddeed-d8b2-b8c1-24ed-e5bf07742642, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.789743] env[61978]: INFO nova.compute.manager [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Took 23.16 seconds to build instance. [ 867.805012] env[61978]: DEBUG nova.compute.manager [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 867.811092] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.040s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.815981] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.062s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.815981] env[61978]: DEBUG nova.objects.instance [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Lazy-loading 'resources' on Instance uuid 66ee1fd7-40f7-461f-b0c6-5951a58ac660 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 867.847660] env[61978]: INFO nova.scheduler.client.report [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] Deleted allocations for instance e249c706-3196-4593-ae96-53f2619e0243 [ 867.855502] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.860222] env[61978]: DEBUG nova.virt.hardware [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 867.860568] env[61978]: DEBUG nova.virt.hardware [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 867.860928] env[61978]: DEBUG nova.virt.hardware [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 
tempest-TenantUsagesTestJSON-1860624541-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 867.860928] env[61978]: DEBUG nova.virt.hardware [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 867.861136] env[61978]: DEBUG nova.virt.hardware [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 867.861330] env[61978]: DEBUG nova.virt.hardware [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 867.861748] env[61978]: DEBUG nova.virt.hardware [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 867.861946] env[61978]: DEBUG nova.virt.hardware [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 867.862173] env[61978]: DEBUG nova.virt.hardware [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 867.862370] env[61978]: DEBUG nova.virt.hardware [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 867.862574] env[61978]: DEBUG nova.virt.hardware [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 867.863771] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053aa0a4-150e-4118-8298-98b9caec2987 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.874136] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ada487-a371-4948-b46f-8190c18dcf9f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.889597] env[61978]: DEBUG oslo_concurrency.lockutils [None req-58a1929c-29aa-4a89-a726-c855ab7d95ff 
tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "92eb5edb-803b-48d4-8c4f-338d7c3b3d13" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.642s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.039740] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Acquiring lock "2084a365-b662-4564-b899-ab4c4a63f2b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.040168] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Lock "2084a365-b662-4564-b899-ab4c4a63f2b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.040483] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Acquiring lock "2084a365-b662-4564-b899-ab4c4a63f2b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.040768] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Lock "2084a365-b662-4564-b899-ab4c4a63f2b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.041043] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Lock "2084a365-b662-4564-b899-ab4c4a63f2b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.045225] env[61978]: INFO nova.compute.manager [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Terminating instance [ 868.047694] env[61978]: DEBUG nova.compute.manager [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 868.048174] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 868.049386] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e46c42f-048b-45fb-9241-013356151d62 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.065278] env[61978]: DEBUG oslo_vmware.api [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1394665, 'name': PowerOnVM_Task, 'duration_secs': 1.06532} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.068131] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 868.068429] env[61978]: INFO nova.compute.manager [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Took 17.58 seconds to spawn the instance on the hypervisor. [ 868.068694] env[61978]: DEBUG nova.compute.manager [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 868.069175] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 868.070174] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d16083-090e-420b-8d56-c2111abbb20c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.073576] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-13a1aac1-9652-400c-9c90-c88d8260b18d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.085407] env[61978]: DEBUG oslo_vmware.api [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Waiting for the task: (returnval){ [ 868.085407] env[61978]: value = "task-1394667" [ 868.085407] env[61978]: _type = "Task" [ 868.085407] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.099379] env[61978]: DEBUG oslo_vmware.api [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Task: {'id': task-1394667, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.123268] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1394666, 'name': ReconfigVM_Task, 'duration_secs': 0.699164} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.123670] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Reconfigured VM instance instance-0000000e to attach disk [datastore2] 371ddf66-a39b-41c4-bbd1-2a1c1b99834e/371ddf66-a39b-41c4-bbd1-2a1c1b99834e.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 868.124563] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ef691b5f-2959-4ba2-8ec1-c0a9bc1533db {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.132436] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 868.132436] env[61978]: value = "task-1394668" [ 868.132436] env[61978]: _type = "Task" [ 868.132436] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.143088] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1394668, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.235605] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]524ddeed-d8b2-b8c1-24ed-e5bf07742642, 'name': SearchDatastore_Task, 'duration_secs': 0.024352} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.235986] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.236320] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] a0762952-2afd-448a-8e46-ba788a4ca131/a0762952-2afd-448a-8e46-ba788a4ca131.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 868.236614] env[61978]: DEBUG oslo_concurrency.lockutils [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.236837] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 868.237072] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f74ca44-3c67-4e93-b72d-c29535a7c6ca {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.241362] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-53688786-d07c-47f9-ab70-8dfbab83c0c4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.249291] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for the task: (returnval){ [ 868.249291] env[61978]: value = "task-1394669" [ 868.249291] env[61978]: _type = "Task" [ 868.249291] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.254365] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 868.254605] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 868.255842] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a55b9cd8-fba5-4000-8437-577ec6825604 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.262073] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394669, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.266149] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Waiting for the task: (returnval){ [ 868.266149] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5269490b-9753-b7b0-ad5c-4168bf795b36" [ 868.266149] env[61978]: _type = "Task" [ 868.266149] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.279726] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5269490b-9753-b7b0-ad5c-4168bf795b36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.294397] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8b0bb830-9562-449d-86ce-35d331ba998b tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Lock "eb7cb200-c162-4e92-8916-6d9abd5cf34d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.984s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.358077] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8801fd19-b403-4847-9992-7231fb423217 tempest-DeleteServersAdminTestJSON-895294080 tempest-DeleteServersAdminTestJSON-895294080-project-admin] Lock "e249c706-3196-4593-ae96-53f2619e0243" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.521s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.393044] env[61978]: DEBUG nova.compute.manager [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 868.578974] env[61978]: DEBUG nova.network.neutron [req-a06cd2c2-a15f-422c-9e14-82f6d595b405 req-0b7987c8-5690-4c16-b9fe-a8581585fccf service nova] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Updated VIF entry in instance network info cache for port 51f0ce4a-1710-4256-9ca8-ac173927565b. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 868.579656] env[61978]: DEBUG nova.network.neutron [req-a06cd2c2-a15f-422c-9e14-82f6d595b405 req-0b7987c8-5690-4c16-b9fe-a8581585fccf service nova] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Updating instance_info_cache with network_info: [{"id": "51f0ce4a-1710-4256-9ca8-ac173927565b", "address": "fa:16:3e:8e:77:cc", "network": {"id": "4b7af29e-cc4a-4765-8d05-6adf88573ad3", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1006801081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00c674bbf1e945ba946d844f9856fdfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51f0ce4a-17", "ovs_interfaceid": "51f0ce4a-1710-4256-9ca8-ac173927565b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.602501] env[61978]: INFO nova.compute.manager [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Took 34.43 seconds to build instance. [ 868.613607] env[61978]: DEBUG oslo_vmware.api [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Task: {'id': task-1394667, 'name': PowerOffVM_Task, 'duration_secs': 0.33018} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.614283] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 868.614562] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 868.614875] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-680bc83b-e4e0-43f4-a5e7-6237a8b8081d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.646316] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1394668, 'name': Rename_Task, 'duration_secs': 0.19848} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.648285] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 868.648285] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5c2d2ed3-bebe-4903-9d1e-62eccd7e4399 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.656476] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 868.656476] env[61978]: value = "task-1394671" [ 868.656476] env[61978]: _type = "Task" [ 868.656476] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.672337] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1394671, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.698353] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d24c7b1d-142e-452f-a788-a5d70e0b5418 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.706683] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b4635b-ac0b-4bf4-a728-b7b4c8c267e0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.743638] env[61978]: DEBUG nova.network.neutron [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Successfully updated port: c8d18564-3f86-41ec-88ac-735b63415259 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 868.745739] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25f063bf-dc64-476a-91f5-5590271dd890 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.759429] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75133e4c-9d8e-4562-9aad-dd2c87f93720 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.769124] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394669, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.784702] env[61978]: DEBUG nova.compute.provider_tree [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 868.795328] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5269490b-9753-b7b0-ad5c-4168bf795b36, 'name': SearchDatastore_Task, 'duration_secs': 0.023619} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.796456] env[61978]: DEBUG nova.compute.manager [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 868.799455] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-718ce008-1a87-4cef-b924-dedd8cfff328 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.810824] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Waiting for the task: (returnval){ [ 868.810824] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]524503c6-1a6a-9b93-cbd7-2455cab8a5c3" [ 868.810824] env[61978]: _type = "Task" [ 868.810824] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.821808] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]524503c6-1a6a-9b93-cbd7-2455cab8a5c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.922480] env[61978]: DEBUG oslo_concurrency.lockutils [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.084358] env[61978]: DEBUG oslo_concurrency.lockutils [req-a06cd2c2-a15f-422c-9e14-82f6d595b405 req-0b7987c8-5690-4c16-b9fe-a8581585fccf service nova] Releasing lock "refresh_cache-a0762952-2afd-448a-8e46-ba788a4ca131" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.084358] env[61978]: DEBUG nova.compute.manager [req-a06cd2c2-a15f-422c-9e14-82f6d595b405 req-0b7987c8-5690-4c16-b9fe-a8581585fccf service nova] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Received event network-vif-deleted-85e56a49-da72-4b4e-9fa6-f0112967bcf0 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 869.105656] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db173f80-dbb0-42ec-9c0e-783e1a06be95 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "243e7146-46fc-43f4-a83b-cdc58f397f9e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.953s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.170079] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1394671, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.249612] env[61978]: DEBUG oslo_concurrency.lockutils [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Acquiring lock "refresh_cache-081339d7-6d9b-4b66-a816-467d23196c9a" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.249922] env[61978]: DEBUG oslo_concurrency.lockutils [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Acquired lock "refresh_cache-081339d7-6d9b-4b66-a816-467d23196c9a" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.249922] env[61978]: DEBUG nova.network.neutron [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 869.262922] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394669, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.844289} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.262922] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] a0762952-2afd-448a-8e46-ba788a4ca131/a0762952-2afd-448a-8e46-ba788a4ca131.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 869.262922] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 869.263153] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9ee4c5b2-97fe-4db0-b955-970163971365 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.272154] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for the task: (returnval){ [ 869.272154] env[61978]: value = "task-1394673" [ 869.272154] env[61978]: _type = "Task" [ 869.272154] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.282414] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394673, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.289331] env[61978]: DEBUG nova.scheduler.client.report [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 869.322530] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]524503c6-1a6a-9b93-cbd7-2455cab8a5c3, 'name': SearchDatastore_Task, 'duration_secs': 0.054895} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.322530] env[61978]: DEBUG oslo_concurrency.lockutils [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.322530] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 96a38ed0-c880-4f21-9389-99f039279072/96a38ed0-c880-4f21-9389-99f039279072.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 869.322688] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dd95cb0f-582a-4e92-ae5a-8784608291cf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.330432] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Waiting for the task: (returnval){ [ 869.330432] env[61978]: value = "task-1394674" [ 869.330432] env[61978]: _type = "Task" [ 869.330432] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.336774] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.343246] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': task-1394674, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.668765] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1394671, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.788459] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394673, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.166843} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.788459] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 869.790029] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92bc493-64a0-4b00-be0d-236111bf0886 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.795865] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.980s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.810994] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.640s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.813242] env[61978]: INFO nova.compute.claims [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 869.831027] env[61978]: 
DEBUG nova.virt.vmwareapi.volumeops [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Reconfiguring VM instance instance-0000000f to attach disk [datastore2] a0762952-2afd-448a-8e46-ba788a4ca131/a0762952-2afd-448a-8e46-ba788a4ca131.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 869.832509] env[61978]: DEBUG nova.network.neutron [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 869.835191] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a492b38-1e33-43e4-88d4-3b11dcd67d28 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.852866] env[61978]: INFO nova.scheduler.client.report [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Deleted allocations for instance 66ee1fd7-40f7-461f-b0c6-5951a58ac660 [ 869.871991] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': task-1394674, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.874694] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for the task: (returnval){ [ 869.874694] env[61978]: value = "task-1394675" [ 869.874694] env[61978]: _type = "Task" [ 869.874694] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.889479] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394675, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.115741] env[61978]: DEBUG nova.network.neutron [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Updating instance_info_cache with network_info: [{"id": "c8d18564-3f86-41ec-88ac-735b63415259", "address": "fa:16:3e:12:e9:1c", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.175", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8d18564-3f", "ovs_interfaceid": "c8d18564-3f86-41ec-88ac-735b63415259", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.168378] env[61978]: DEBUG oslo_vmware.api [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1394671, 'name': PowerOnVM_Task, 'duration_secs': 1.157971} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.168666] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 870.168866] env[61978]: INFO nova.compute.manager [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Took 9.67 seconds to spawn the instance on the hypervisor. 
[ 870.169067] env[61978]: DEBUG nova.compute.manager [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 870.169854] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4567fc2a-fc47-4576-a074-e6b53275254f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.345557] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': task-1394674, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.846654} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.346166] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 96a38ed0-c880-4f21-9389-99f039279072/96a38ed0-c880-4f21-9389-99f039279072.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 870.346358] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 870.346605] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a33ee25b-9b25-4dca-9fa6-a7ab8806828c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.354180] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Waiting for the task: (returnval){ [ 870.354180] env[61978]: value = "task-1394676" [ 870.354180] env[61978]: _type = "Task" [ 870.354180] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.363338] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': task-1394676, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.378751] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3f03c01c-e1b3-457a-95a4-25855443acc1 tempest-ServerExternalEventsTest-400644720 tempest-ServerExternalEventsTest-400644720-project-member] Lock "66ee1fd7-40f7-461f-b0c6-5951a58ac660" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.869s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.386311] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394675, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.557198] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 870.557198] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 870.557505] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Deleting the datastore file [datastore2] 2084a365-b662-4564-b899-ab4c4a63f2b9 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 870.557687] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ebc9eb8-3ebd-4458-b619-941a8ed64f9a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.564660] env[61978]: DEBUG oslo_vmware.api [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Waiting for the task: (returnval){ [ 870.564660] env[61978]: value = "task-1394677" [ 870.564660] env[61978]: _type = "Task" [ 870.564660] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.572407] env[61978]: DEBUG oslo_vmware.api [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Task: {'id': task-1394677, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.619227] env[61978]: DEBUG oslo_concurrency.lockutils [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Releasing lock "refresh_cache-081339d7-6d9b-4b66-a816-467d23196c9a" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.619648] env[61978]: DEBUG nova.compute.manager [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Instance network_info: |[{"id": "c8d18564-3f86-41ec-88ac-735b63415259", "address": "fa:16:3e:12:e9:1c", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.175", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8d18564-3f", "ovs_interfaceid": "c8d18564-3f86-41ec-88ac-735b63415259", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 870.620347] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:e9:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27138a4c-60c9-45fb-bf37-4c2f765315a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8d18564-3f86-41ec-88ac-735b63415259', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 870.629316] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Creating folder: Project (5c777a4c08854c61beda5832aa015a8d). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 870.629657] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0428c5f9-99c7-4867-889c-30ca28df984f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.641343] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Created folder: Project (5c777a4c08854c61beda5832aa015a8d) in parent group-v295764. 
[ 870.641343] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Creating folder: Instances. Parent ref: group-v295811. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 870.641604] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6680a197-4a58-4b57-b122-bb458d590c98 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.653482] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Created folder: Instances in parent group-v295811. [ 870.653482] env[61978]: DEBUG oslo.service.loopingcall [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 870.654532] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 870.654532] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-97893ce6-7d69-4fa0-9788-fdabe1a35652 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.676861] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 870.676861] env[61978]: value = "task-1394680" [ 870.676861] env[61978]: _type = "Task" [ 870.676861] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.687239] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394680, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.693714] env[61978]: INFO nova.compute.manager [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Took 23.67 seconds to build instance. [ 870.870066] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': task-1394676, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.19728} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.871850] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 870.878182] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e2eafb-3542-4a72-937b-4d2c991dba35 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.909370] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] 96a38ed0-c880-4f21-9389-99f039279072/96a38ed0-c880-4f21-9389-99f039279072.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 870.918352] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e16e07c-0255-4758-b245-0ad02cdde635 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.938214] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394675, 'name': ReconfigVM_Task, 'duration_secs': 0.98234} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.938878] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Reconfigured VM instance instance-0000000f to attach disk [datastore2] a0762952-2afd-448a-8e46-ba788a4ca131/a0762952-2afd-448a-8e46-ba788a4ca131.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 870.940693] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fcfe022e-a6e0-45f2-8c68-9b17da3a0c32 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.944933] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Waiting for the task: (returnval){ [ 870.944933] env[61978]: value = "task-1394682" [ 870.944933] env[61978]: _type = "Task" [ 870.944933] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.951264] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for the task: (returnval){ [ 870.951264] env[61978]: value = "task-1394683" [ 870.951264] env[61978]: _type = "Task" [ 870.951264] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.961047] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': task-1394682, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.968229] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394683, 'name': Rename_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.078036] env[61978]: DEBUG oslo_vmware.api [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Task: {'id': task-1394677, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.411198} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.078549] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 871.079102] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 871.079554] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 871.079844] env[61978]: INFO nova.compute.manager [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Took 3.03 seconds to destroy the instance on the hypervisor. [ 871.080195] env[61978]: DEBUG oslo.service.loopingcall [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 871.080569] env[61978]: DEBUG nova.compute.manager [-] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 871.080908] env[61978]: DEBUG nova.network.neutron [-] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 871.196530] env[61978]: DEBUG oslo_concurrency.lockutils [None req-69cd9a96-e41b-4fa6-b284-d9e2dc759a0d tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Lock "371ddf66-a39b-41c4-bbd1-2a1c1b99834e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.180s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.197139] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394680, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.286532] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43b7d2c-f2ea-43de-b379-f6da36448934 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.291776] env[61978]: DEBUG nova.compute.manager [req-4fa3f12f-02a0-4b33-8e40-d8caeba87412 req-e0d2d710-0d20-4e41-8352-8150e6aea94d service nova] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Received event network-vif-plugged-c8d18564-3f86-41ec-88ac-735b63415259 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 871.291920] env[61978]: DEBUG oslo_concurrency.lockutils [req-4fa3f12f-02a0-4b33-8e40-d8caeba87412 req-e0d2d710-0d20-4e41-8352-8150e6aea94d service nova] Acquiring lock "081339d7-6d9b-4b66-a816-467d23196c9a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.292267] env[61978]: DEBUG oslo_concurrency.lockutils [req-4fa3f12f-02a0-4b33-8e40-d8caeba87412 req-e0d2d710-0d20-4e41-8352-8150e6aea94d service nova] Lock "081339d7-6d9b-4b66-a816-467d23196c9a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.292555] env[61978]: DEBUG oslo_concurrency.lockutils [req-4fa3f12f-02a0-4b33-8e40-d8caeba87412 req-e0d2d710-0d20-4e41-8352-8150e6aea94d service nova] Lock "081339d7-6d9b-4b66-a816-467d23196c9a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.292822] env[61978]: DEBUG nova.compute.manager [req-4fa3f12f-02a0-4b33-8e40-d8caeba87412 req-e0d2d710-0d20-4e41-8352-8150e6aea94d service nova] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] No waiting events found dispatching network-vif-plugged-c8d18564-3f86-41ec-88ac-735b63415259 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 871.293108] env[61978]: WARNING nova.compute.manager 
[req-4fa3f12f-02a0-4b33-8e40-d8caeba87412 req-e0d2d710-0d20-4e41-8352-8150e6aea94d service nova] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Received unexpected event network-vif-plugged-c8d18564-3f86-41ec-88ac-735b63415259 for instance with vm_state building and task_state spawning. [ 871.293382] env[61978]: DEBUG nova.compute.manager [req-4fa3f12f-02a0-4b33-8e40-d8caeba87412 req-e0d2d710-0d20-4e41-8352-8150e6aea94d service nova] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Received event network-changed-c8d18564-3f86-41ec-88ac-735b63415259 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 871.293650] env[61978]: DEBUG nova.compute.manager [req-4fa3f12f-02a0-4b33-8e40-d8caeba87412 req-e0d2d710-0d20-4e41-8352-8150e6aea94d service nova] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Refreshing instance network info cache due to event network-changed-c8d18564-3f86-41ec-88ac-735b63415259. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 871.293957] env[61978]: DEBUG oslo_concurrency.lockutils [req-4fa3f12f-02a0-4b33-8e40-d8caeba87412 req-e0d2d710-0d20-4e41-8352-8150e6aea94d service nova] Acquiring lock "refresh_cache-081339d7-6d9b-4b66-a816-467d23196c9a" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.294156] env[61978]: DEBUG oslo_concurrency.lockutils [req-4fa3f12f-02a0-4b33-8e40-d8caeba87412 req-e0d2d710-0d20-4e41-8352-8150e6aea94d service nova] Acquired lock "refresh_cache-081339d7-6d9b-4b66-a816-467d23196c9a" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.294337] env[61978]: DEBUG nova.network.neutron [req-4fa3f12f-02a0-4b33-8e40-d8caeba87412 req-e0d2d710-0d20-4e41-8352-8150e6aea94d service nova] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Refreshing network info cache for port c8d18564-3f86-41ec-88ac-735b63415259 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 871.303652] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57781c49-7606-43d3-9882-2b8bfd48f868 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.340660] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f8a900a-cfb0-4a7e-b6b0-643156787226 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.349721] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0b5986-b49c-40b2-b1c3-700842370674 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.367204] env[61978]: DEBUG nova.compute.provider_tree [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.459807] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': task-1394682, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.466217] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394683, 'name': Rename_Task, 'duration_secs': 0.343239} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.466217] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 871.466397] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a5a9194-a14f-4f5e-8406-8f870e674103 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.474140] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for the task: (returnval){ [ 871.474140] env[61978]: value = "task-1394684" [ 871.474140] env[61978]: _type = "Task" [ 871.474140] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.490328] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394684, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.694035] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394680, 'name': CreateVM_Task, 'duration_secs': 0.583101} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.694237] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 871.697135] env[61978]: DEBUG oslo_concurrency.lockutils [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.697135] env[61978]: DEBUG oslo_concurrency.lockutils [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.697135] env[61978]: DEBUG oslo_concurrency.lockutils [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 871.697135] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0df14589-5326-4aed-9c1c-ca212d139a43 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.700805] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Waiting for the task: (returnval){ [ 871.700805] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52262455-9d07-5698-efcf-606d587c6625" [ 871.700805] env[61978]: _type = "Task" [ 871.700805] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.714135] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52262455-9d07-5698-efcf-606d587c6625, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.871131] env[61978]: DEBUG nova.scheduler.client.report [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 871.964698] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': task-1394682, 'name': ReconfigVM_Task, 'duration_secs': 0.898036} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.965012] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Reconfigured VM instance instance-00000002 to attach disk [datastore2] 96a38ed0-c880-4f21-9389-99f039279072/96a38ed0-c880-4f21-9389-99f039279072.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 871.965707] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0356466c-4b4b-4c03-8933-c4033c76489b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.973985] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Waiting for the task: (returnval){ [ 871.973985] env[61978]: value = "task-1394685" [ 871.973985] env[61978]: _type = "Task" [ 871.973985] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.991089] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': task-1394685, 'name': Rename_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.996066] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394684, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.012427] env[61978]: DEBUG nova.network.neutron [-] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.215078] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52262455-9d07-5698-efcf-606d587c6625, 'name': SearchDatastore_Task, 'duration_secs': 0.046558} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.215078] env[61978]: DEBUG oslo_concurrency.lockutils [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.215303] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 872.215537] env[61978]: DEBUG oslo_concurrency.lockutils [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.215935] env[61978]: DEBUG oslo_concurrency.lockutils [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.215935] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 872.216175] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4fb731f8-5b3f-4574-8943-8de83aaba41f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.226385] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 872.226385] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 
tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 872.227641] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-957272e8-20b3-4155-8fdb-3db720e92a33 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.235031] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Waiting for the task: (returnval){ [ 872.235031] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a3a9ef-a758-8438-bbef-1cbc222d8a50" [ 872.235031] env[61978]: _type = "Task" [ 872.235031] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.250259] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a3a9ef-a758-8438-bbef-1cbc222d8a50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.376637] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.566s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.378029] env[61978]: DEBUG nova.compute.manager [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 872.381527] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 12.078s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.385020] env[61978]: DEBUG nova.objects.instance [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61978) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 872.407220] env[61978]: DEBUG nova.network.neutron [req-4fa3f12f-02a0-4b33-8e40-d8caeba87412 req-e0d2d710-0d20-4e41-8352-8150e6aea94d service nova] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Updated VIF entry in instance network info cache for port c8d18564-3f86-41ec-88ac-735b63415259. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 872.408194] env[61978]: DEBUG nova.network.neutron [req-4fa3f12f-02a0-4b33-8e40-d8caeba87412 req-e0d2d710-0d20-4e41-8352-8150e6aea94d service nova] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Updating instance_info_cache with network_info: [{"id": "c8d18564-3f86-41ec-88ac-735b63415259", "address": "fa:16:3e:12:e9:1c", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.175", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8d18564-3f", "ovs_interfaceid": "c8d18564-3f86-41ec-88ac-735b63415259", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.491153] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': task-1394685, 'name': Rename_Task, 'duration_secs': 0.158958} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.494805] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 872.495296] env[61978]: DEBUG oslo_vmware.api [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394684, 'name': PowerOnVM_Task, 'duration_secs': 0.841226} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.495406] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9af71ce6-8c8f-4d99-b5fd-54ae376bec2b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.497322] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 872.497589] env[61978]: INFO nova.compute.manager [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Took 9.35 seconds to spawn the instance on the hypervisor. [ 872.497679] env[61978]: DEBUG nova.compute.manager [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 872.499539] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-181eca85-f376-4f25-8809-311ef9dfb9aa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.512795] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Waiting for the task: (returnval){ [ 872.512795] env[61978]: value = "task-1394686" [ 872.512795] env[61978]: _type = "Task" [ 872.512795] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.516965] env[61978]: INFO nova.compute.manager [-] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Took 1.44 seconds to deallocate network for instance. [ 872.526852] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': task-1394686, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.755202] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "50788030-4dc2-4215-bf2c-acba5dd33ce4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.755615] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "50788030-4dc2-4215-bf2c-acba5dd33ce4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.762835] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a3a9ef-a758-8438-bbef-1cbc222d8a50, 'name': SearchDatastore_Task, 'duration_secs': 0.011026} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.763912] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4428a7e6-216c-4a5d-8ad9-9d9bea82ad74 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.772611] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Waiting for the task: (returnval){ [ 872.772611] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52bb1ba0-4b0b-407c-a5bf-ae1c6fb19c5e" [ 872.772611] env[61978]: _type = "Task" [ 872.772611] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.783287] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52bb1ba0-4b0b-407c-a5bf-ae1c6fb19c5e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.809898] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Acquiring lock "bb0c149c-920e-47c4-a960-47b2fb443431" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.809898] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Lock "bb0c149c-920e-47c4-a960-47b2fb443431" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.878742] env[61978]: DEBUG nova.compute.manager [req-925c60c3-aef1-4e21-9f33-f0071bcc524a req-18720013-c648-4a13-a725-78ff32695693 service nova] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Received event network-changed-82595737-f96a-45c3-9bcc-2642e53bdaec {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 872.881159] env[61978]: DEBUG nova.compute.manager [req-925c60c3-aef1-4e21-9f33-f0071bcc524a req-18720013-c648-4a13-a725-78ff32695693 service nova] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Refreshing instance network info cache due to event network-changed-82595737-f96a-45c3-9bcc-2642e53bdaec. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 872.881159] env[61978]: DEBUG oslo_concurrency.lockutils [req-925c60c3-aef1-4e21-9f33-f0071bcc524a req-18720013-c648-4a13-a725-78ff32695693 service nova] Acquiring lock "refresh_cache-eb7cb200-c162-4e92-8916-6d9abd5cf34d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.881159] env[61978]: DEBUG oslo_concurrency.lockutils [req-925c60c3-aef1-4e21-9f33-f0071bcc524a req-18720013-c648-4a13-a725-78ff32695693 service nova] Acquired lock "refresh_cache-eb7cb200-c162-4e92-8916-6d9abd5cf34d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.881159] env[61978]: DEBUG nova.network.neutron [req-925c60c3-aef1-4e21-9f33-f0071bcc524a req-18720013-c648-4a13-a725-78ff32695693 service nova] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Refreshing network info cache for port 82595737-f96a-45c3-9bcc-2642e53bdaec {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 872.886538] env[61978]: DEBUG nova.compute.utils [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 872.892206] env[61978]: DEBUG nova.compute.manager [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 872.892206] env[61978]: DEBUG nova.network.neutron [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 872.918419] env[61978]: DEBUG oslo_concurrency.lockutils [req-4fa3f12f-02a0-4b33-8e40-d8caeba87412 req-e0d2d710-0d20-4e41-8352-8150e6aea94d service nova] Releasing lock "refresh_cache-081339d7-6d9b-4b66-a816-467d23196c9a" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.028734] env[61978]: INFO nova.compute.manager [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Took 23.37 seconds to build instance. [ 873.033379] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.041452] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': task-1394686, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.081555] env[61978]: DEBUG nova.policy [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85c9b6562cd44989a9e9250bc0d7fdd4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ed07a0f23094421876c28a10c8adbe8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 873.264204] env[61978]: DEBUG nova.compute.manager [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 873.284580] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52bb1ba0-4b0b-407c-a5bf-ae1c6fb19c5e, 'name': SearchDatastore_Task, 'duration_secs': 0.013589} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.284876] env[61978]: DEBUG oslo_concurrency.lockutils [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.285200] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 081339d7-6d9b-4b66-a816-467d23196c9a/081339d7-6d9b-4b66-a816-467d23196c9a.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 873.285505] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ecfa183d-e9b2-4afe-8f90-120e3d4d08a6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.295665] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Waiting for the task: (returnval){ [ 873.295665] env[61978]: value = "task-1394688" [ 873.295665] env[61978]: _type = "Task" [ 873.295665] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.307063] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': task-1394688, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.312334] env[61978]: DEBUG nova.compute.manager [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 873.389651] env[61978]: DEBUG nova.compute.manager [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 873.398383] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c69468b-2dca-4b5b-bde0-94ec6eda870c tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.399468] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.547s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.402034] env[61978]: INFO nova.compute.claims [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 873.533691] env[61978]: DEBUG oslo_vmware.api [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Task: {'id': task-1394686, 'name': PowerOnVM_Task, 'duration_secs': 0.778334} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.534195] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b4344be-2871-4a76-9cea-8dbee61f01b2 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "a0762952-2afd-448a-8e46-ba788a4ca131" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.326s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.534615] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 873.534615] env[61978]: DEBUG nova.compute.manager [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 873.535606] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0229f08-4d31-4c62-bdce-b8d0d5736f5c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.794558] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.812964] env[61978]: DEBUG oslo_vmware.api 
[None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': task-1394688, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.840446] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.061705] env[61978]: DEBUG oslo_concurrency.lockutils [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.309765] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': task-1394688, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.822087} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.310060] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 081339d7-6d9b-4b66-a816-467d23196c9a/081339d7-6d9b-4b66-a816-467d23196c9a.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 874.310283] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 874.310711] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e0cf085d-19c9-49d2-a4ec-4be9cfbf8b36 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.320104] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Waiting for the task: (returnval){ [ 874.320104] env[61978]: value = "task-1394689" [ 874.320104] env[61978]: _type = "Task" [ 874.320104] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.338979] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': task-1394689, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.359606] env[61978]: DEBUG nova.network.neutron [req-925c60c3-aef1-4e21-9f33-f0071bcc524a req-18720013-c648-4a13-a725-78ff32695693 service nova] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Updated VIF entry in instance network info cache for port 82595737-f96a-45c3-9bcc-2642e53bdaec. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 874.359606] env[61978]: DEBUG nova.network.neutron [req-925c60c3-aef1-4e21-9f33-f0071bcc524a req-18720013-c648-4a13-a725-78ff32695693 service nova] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Updating instance_info_cache with network_info: [{"id": "82595737-f96a-45c3-9bcc-2642e53bdaec", "address": "fa:16:3e:b5:82:28", "network": {"id": "05b0f51b-9661-4555-9763-fcc5acf38ace", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-393436776-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51fc76950ff44b69f26a6977be315bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bfa7abe-7e46-4d8f-b50a-4d0c4509e4dc", "external-id": "nsx-vlan-transportzone-951", "segmentation_id": 951, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82595737-f9", "ovs_interfaceid": "82595737-f96a-45c3-9bcc-2642e53bdaec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.365457] env[61978]: DEBUG nova.compute.manager [req-5007916e-cc9d-4c7c-9468-1a4cf883535c req-29a14cdb-7f72-410a-b475-0735df447c7c service nova] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Received event network-vif-deleted-a9f70cfc-477a-44cc-8077-b23baa39cda7 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 874.401407] env[61978]: DEBUG nova.compute.manager [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 874.440250] env[61978]: DEBUG nova.virt.hardware [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 874.440457] env[61978]: DEBUG nova.virt.hardware [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 874.440611] env[61978]: DEBUG nova.virt.hardware [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 874.440787] env[61978]: DEBUG nova.virt.hardware [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 874.440928] env[61978]: DEBUG nova.virt.hardware [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 874.441085] env[61978]: DEBUG nova.virt.hardware [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 874.441297] env[61978]: DEBUG nova.virt.hardware [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 874.441468] env[61978]: DEBUG nova.virt.hardware [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 874.441877] env[61978]: DEBUG nova.virt.hardware [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 874.441934] env[61978]: DEBUG nova.virt.hardware [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 874.442211] env[61978]: DEBUG nova.virt.hardware [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 874.443502] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0e489b-545d-4085-a947-cb24b4474ee7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.454457] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f4b61b-74e9-4476-b911-fcd218030b76 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.507680] env[61978]: DEBUG nova.network.neutron [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Successfully created port: f98d4797-3791-4132-9a71-1b520fa8e5e8 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 874.814246] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d687baa3-f08e-4c5c-a7b3-cec50550f4b7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.828283] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6473c0c8-14a3-4607-9bc8-402b4d7a4f1f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.843277] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': task-1394689, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071211} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.870522] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 874.871330] env[61978]: DEBUG oslo_concurrency.lockutils [req-925c60c3-aef1-4e21-9f33-f0071bcc524a req-18720013-c648-4a13-a725-78ff32695693 service nova] Releasing lock "refresh_cache-eb7cb200-c162-4e92-8916-6d9abd5cf34d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 874.873858] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79eb69bc-6490-4c82-8e00-f4d77cb5708d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.877730] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f23e52-4e5d-4d54-a4c8-ae0117ed2449 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.910847] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Reconfiguring VM instance instance-00000010 to attach disk [datastore2] 081339d7-6d9b-4b66-a816-467d23196c9a/081339d7-6d9b-4b66-a816-467d23196c9a.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 874.912303] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-494e6e21-baf2-4de9-9576-6d2fe9d840bf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.919053] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20b9a631-33d8-45fa-b40a-623f67d32ae2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.946701] env[61978]: DEBUG nova.compute.provider_tree [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.950062] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Waiting for the task: (returnval){ [ 874.950062] env[61978]: value = "task-1394690" [ 874.950062] env[61978]: _type = "Task" [ 874.950062] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.961995] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': task-1394690, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.451941] env[61978]: DEBUG nova.scheduler.client.report [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 875.474481] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': task-1394690, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.968524] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.568s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.968839] env[61978]: DEBUG nova.compute.manager [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 875.973221] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.101s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.975258] env[61978]: INFO nova.compute.claims [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 875.989177] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': task-1394690, 'name': ReconfigVM_Task, 'duration_secs': 0.567795} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.989177] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Reconfigured VM instance instance-00000010 to attach disk [datastore2] 081339d7-6d9b-4b66-a816-467d23196c9a/081339d7-6d9b-4b66-a816-467d23196c9a.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 875.989177] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7864703-cdfd-4ce8-878b-7ddd79b9d48e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.997389] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Waiting for the task: (returnval){ [ 875.997389] env[61978]: value = "task-1394692" [ 875.997389] env[61978]: _type = "Task" [ 875.997389] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.009317] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': task-1394692, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.485447] env[61978]: DEBUG nova.compute.utils [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 876.486840] env[61978]: DEBUG nova.compute.manager [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 876.487023] env[61978]: DEBUG nova.network.neutron [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 876.510323] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': task-1394692, 'name': Rename_Task, 'duration_secs': 0.261486} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.511248] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 876.511375] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b32127e-4cc6-40ac-9ffe-b9ec6023c497 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.521212] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Waiting for the task: (returnval){ [ 876.521212] env[61978]: value = "task-1394693" [ 876.521212] env[61978]: _type = "Task" [ 876.521212] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.534746] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': task-1394693, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.556221] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 876.556569] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 876.556770] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 876.568231] env[61978]: DEBUG nova.policy [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '354d8a810de04cda9ef973275109aa2c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a3471435d4747648cd8ddf0817d9b85', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 876.945749] env[61978]: DEBUG nova.compute.manager [req-19347a01-96e5-427e-bf81-da7c2f12d3fe req-7d6bb297-ca22-470a-93ab-94edfa18045c service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Received event network-changed-2daa968c-ac9c-4f15-ad2b-7977f5581ef1 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 876.945944] env[61978]: 
DEBUG nova.compute.manager [req-19347a01-96e5-427e-bf81-da7c2f12d3fe req-7d6bb297-ca22-470a-93ab-94edfa18045c service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Refreshing instance network info cache due to event network-changed-2daa968c-ac9c-4f15-ad2b-7977f5581ef1. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 876.946311] env[61978]: DEBUG oslo_concurrency.lockutils [req-19347a01-96e5-427e-bf81-da7c2f12d3fe req-7d6bb297-ca22-470a-93ab-94edfa18045c service nova] Acquiring lock "refresh_cache-243e7146-46fc-43f4-a83b-cdc58f397f9e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 876.946385] env[61978]: DEBUG oslo_concurrency.lockutils [req-19347a01-96e5-427e-bf81-da7c2f12d3fe req-7d6bb297-ca22-470a-93ab-94edfa18045c service nova] Acquired lock "refresh_cache-243e7146-46fc-43f4-a83b-cdc58f397f9e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.946507] env[61978]: DEBUG nova.network.neutron [req-19347a01-96e5-427e-bf81-da7c2f12d3fe req-7d6bb297-ca22-470a-93ab-94edfa18045c service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Refreshing network info cache for port 2daa968c-ac9c-4f15-ad2b-7977f5581ef1 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 876.990935] env[61978]: DEBUG nova.compute.manager [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 877.038591] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': task-1394693, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.059976] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.386439] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-927f25fc-7043-486a-a887-418434836446 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.394965] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5bb1054-a6eb-4fc2-8e51-9bd0933d8f3e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.434079] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92748ad-d817-47d4-9448-916cd97a9a23 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.443854] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af6c05d-c246-4532-9d53-b4bff2aada90 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.463585] env[61978]: DEBUG nova.compute.provider_tree [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 877.534016] env[61978]: DEBUG oslo_vmware.api [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': task-1394693, 'name': PowerOnVM_Task, 'duration_secs': 0.937411} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.534562] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 877.535236] env[61978]: INFO nova.compute.manager [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Took 9.73 seconds to spawn the instance on the hypervisor. 
[ 877.535236] env[61978]: DEBUG nova.compute.manager [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 877.535742] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5093cd-95b7-4d3e-bd0b-688ed7254287 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.554184] env[61978]: DEBUG nova.network.neutron [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Successfully created port: ebb15ef6-0310-4f67-8247-f09f03d452db {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 877.767019] env[61978]: DEBUG nova.network.neutron [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Successfully updated port: f98d4797-3791-4132-9a71-1b520fa8e5e8 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 877.967383] env[61978]: DEBUG nova.scheduler.client.report [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 878.002555] env[61978]: DEBUG nova.compute.manager [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 878.046333] env[61978]: DEBUG nova.virt.hardware [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 878.046649] env[61978]: DEBUG nova.virt.hardware [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 878.046817] env[61978]: DEBUG nova.virt.hardware [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 878.047038] env[61978]: DEBUG nova.virt.hardware [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 878.047210] env[61978]: DEBUG nova.virt.hardware [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 878.047365] env[61978]: DEBUG nova.virt.hardware [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 878.047580] env[61978]: DEBUG nova.virt.hardware [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 878.047971] env[61978]: DEBUG nova.virt.hardware [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 878.047971] env[61978]: DEBUG 
nova.virt.hardware [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 878.048109] env[61978]: DEBUG nova.virt.hardware [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 878.048251] env[61978]: DEBUG nova.virt.hardware [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 878.049140] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f9c3161-380f-4874-bce3-a7378a0f2b98 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.068412] env[61978]: INFO nova.compute.manager [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Took 25.44 seconds to build instance. [ 878.073342] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43e3831-dd01-4fae-b90d-825dd4101227 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.275151] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquiring lock "refresh_cache-d2614f71-3026-41d4-ae04-eaede9b5ead5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.275151] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquired lock "refresh_cache-d2614f71-3026-41d4-ae04-eaede9b5ead5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.275151] env[61978]: DEBUG nova.network.neutron [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 878.474590] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.502s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.475150] env[61978]: DEBUG nova.compute.manager [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 
tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 878.479528] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.951s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.481316] env[61978]: INFO nova.compute.claims [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 878.498215] env[61978]: DEBUG nova.network.neutron [req-19347a01-96e5-427e-bf81-da7c2f12d3fe req-7d6bb297-ca22-470a-93ab-94edfa18045c service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Updated VIF entry in instance network info cache for port 2daa968c-ac9c-4f15-ad2b-7977f5581ef1. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 878.498612] env[61978]: DEBUG nova.network.neutron [req-19347a01-96e5-427e-bf81-da7c2f12d3fe req-7d6bb297-ca22-470a-93ab-94edfa18045c service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Updating instance_info_cache with network_info: [{"id": "2daa968c-ac9c-4f15-ad2b-7977f5581ef1", "address": "fa:16:3e:0c:ea:2e", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2daa968c-ac", "ovs_interfaceid": "2daa968c-ac9c-4f15-ad2b-7977f5581ef1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.571097] env[61978]: DEBUG oslo_concurrency.lockutils [None req-012f2487-ab6c-4c40-b825-2718f2c6b3d5 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Lock "081339d7-6d9b-4b66-a816-467d23196c9a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.501s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.680436] env[61978]: DEBUG 
nova.compute.manager [req-57a5b558-2125-437d-87e9-15c8e4f408e0 req-6d6991cb-555e-4a8b-95c0-8a1e09255195 service nova] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Received event network-changed-1d9ac1c2-acc1-4cef-8a1e-445797f69a52 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 878.680509] env[61978]: DEBUG nova.compute.manager [req-57a5b558-2125-437d-87e9-15c8e4f408e0 req-6d6991cb-555e-4a8b-95c0-8a1e09255195 service nova] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Refreshing instance network info cache due to event network-changed-1d9ac1c2-acc1-4cef-8a1e-445797f69a52. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 878.680706] env[61978]: DEBUG oslo_concurrency.lockutils [req-57a5b558-2125-437d-87e9-15c8e4f408e0 req-6d6991cb-555e-4a8b-95c0-8a1e09255195 service nova] Acquiring lock "refresh_cache-371ddf66-a39b-41c4-bbd1-2a1c1b99834e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.680837] env[61978]: DEBUG oslo_concurrency.lockutils [req-57a5b558-2125-437d-87e9-15c8e4f408e0 req-6d6991cb-555e-4a8b-95c0-8a1e09255195 service nova] Acquired lock "refresh_cache-371ddf66-a39b-41c4-bbd1-2a1c1b99834e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.681008] env[61978]: DEBUG nova.network.neutron [req-57a5b558-2125-437d-87e9-15c8e4f408e0 req-6d6991cb-555e-4a8b-95c0-8a1e09255195 service nova] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Refreshing network info cache for port 1d9ac1c2-acc1-4cef-8a1e-445797f69a52 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 878.859726] env[61978]: DEBUG nova.network.neutron [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 878.988247] env[61978]: DEBUG nova.compute.utils [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 878.993070] env[61978]: DEBUG nova.compute.manager [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 878.993351] env[61978]: DEBUG nova.network.neutron [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 879.005045] env[61978]: DEBUG oslo_concurrency.lockutils [req-19347a01-96e5-427e-bf81-da7c2f12d3fe req-7d6bb297-ca22-470a-93ab-94edfa18045c service nova] Releasing lock "refresh_cache-243e7146-46fc-43f4-a83b-cdc58f397f9e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.167161] env[61978]: DEBUG nova.policy [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1ca19a0dd1a4ea9b6f2ea5e0cc21695', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8499568efa184c35a99e2959f04273fc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 879.285751] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Acquiring lock "96a38ed0-c880-4f21-9389-99f039279072" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.286116] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Lock "96a38ed0-c880-4f21-9389-99f039279072" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.288353] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Acquiring lock "96a38ed0-c880-4f21-9389-99f039279072-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.288353] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Lock "96a38ed0-c880-4f21-9389-99f039279072-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.288353] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 
tempest-ServersAdmin275Test-442011889-project-member] Lock "96a38ed0-c880-4f21-9389-99f039279072-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.291741] env[61978]: INFO nova.compute.manager [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Terminating instance [ 879.299275] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Acquiring lock "refresh_cache-96a38ed0-c880-4f21-9389-99f039279072" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.299275] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Acquired lock "refresh_cache-96a38ed0-c880-4f21-9389-99f039279072" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.299275] env[61978]: DEBUG nova.network.neutron [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 879.343833] env[61978]: DEBUG nova.network.neutron [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Updating instance_info_cache with network_info: [{"id": "f98d4797-3791-4132-9a71-1b520fa8e5e8", "address": "fa:16:3e:fa:11:fe", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.249", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf98d4797-37", "ovs_interfaceid": "f98d4797-3791-4132-9a71-1b520fa8e5e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.351010] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "c17c986e-c008-4414-8dd1-4ea836458048" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.351582] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "c17c986e-c008-4414-8dd1-4ea836458048" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.371289] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Acquiring lock "ff793464-9bef-449f-8485-36d3b8fb1d69" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.371537] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Lock "ff793464-9bef-449f-8485-36d3b8fb1d69" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.494910] env[61978]: DEBUG nova.compute.manager [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 879.846160] env[61978]: DEBUG nova.network.neutron [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 879.849731] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Releasing lock "refresh_cache-d2614f71-3026-41d4-ae04-eaede9b5ead5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.850209] env[61978]: DEBUG nova.compute.manager [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Instance network_info: |[{"id": "f98d4797-3791-4132-9a71-1b520fa8e5e8", "address": "fa:16:3e:fa:11:fe", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.249", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf98d4797-37", "ovs_interfaceid": "f98d4797-3791-4132-9a71-1b520fa8e5e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 879.850697] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:11:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27138a4c-60c9-45fb-bf37-4c2f765315a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f98d4797-3791-4132-9a71-1b520fa8e5e8', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 879.860542] env[61978]: DEBUG oslo.service.loopingcall [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 879.866783] env[61978]: DEBUG nova.compute.manager [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 879.866783] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 879.868363] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ace1f447-cebe-48bc-af25-bdbb6fc2c4ea {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.887778] env[61978]: DEBUG nova.compute.manager [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 879.897338] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 879.897338] env[61978]: value = "task-1394696" [ 879.897338] env[61978]: _type = "Task" [ 879.897338] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.912044] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394696, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.967140] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7143d9c9-6acc-4650-aa9b-b50810a2a6e9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.978904] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752d86b0-4055-4265-8aa3-08dd6b893e23 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.023966] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a6c57f-de6a-4aba-a9c9-4f2bf5bacc38 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.027527] env[61978]: DEBUG nova.network.neutron [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Successfully created port: 51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 880.034676] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb4ffcd-6242-4ff2-a2a3-abf445cdf4ee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.052991] env[61978]: DEBUG nova.compute.provider_tree [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.062210] env[61978]: DEBUG nova.network.neutron [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] 
[instance: 96a38ed0-c880-4f21-9389-99f039279072] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.207428] env[61978]: DEBUG nova.network.neutron [req-57a5b558-2125-437d-87e9-15c8e4f408e0 req-6d6991cb-555e-4a8b-95c0-8a1e09255195 service nova] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Updated VIF entry in instance network info cache for port 1d9ac1c2-acc1-4cef-8a1e-445797f69a52. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 880.207779] env[61978]: DEBUG nova.network.neutron [req-57a5b558-2125-437d-87e9-15c8e4f408e0 req-6d6991cb-555e-4a8b-95c0-8a1e09255195 service nova] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Updating instance_info_cache with network_info: [{"id": "1d9ac1c2-acc1-4cef-8a1e-445797f69a52", "address": "fa:16:3e:da:2b:8c", "network": {"id": "c16671c6-39f3-456e-b159-3af6a9553564", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1089542181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "add5612301884f668bbe80681629e8d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d9ac1c2-ac", "ovs_interfaceid": "1d9ac1c2-acc1-4cef-8a1e-445797f69a52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.392084] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.414145] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394696, 'name': CreateVM_Task, 'duration_secs': 0.478032} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.415122] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 880.415859] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.416073] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.416695] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 880.416695] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d503a072-8c9c-451d-9d91-afdb5f1117d8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.423142] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for the task: (returnval){ [ 880.423142] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cbc42b-a077-25b2-e337-eda0ec1cd05d" [ 880.423142] env[61978]: _type = "Task" [ 880.423142] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.428172] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.434591] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cbc42b-a077-25b2-e337-eda0ec1cd05d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.527335] env[61978]: DEBUG nova.network.neutron [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Successfully updated port: ebb15ef6-0310-4f67-8247-f09f03d452db {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 880.529020] env[61978]: DEBUG nova.compute.manager [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 880.557590] env[61978]: DEBUG nova.scheduler.client.report [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 880.565109] env[61978]: DEBUG nova.virt.hardware [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 880.565352] env[61978]: DEBUG nova.virt.hardware [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 880.565492] env[61978]: DEBUG nova.virt.hardware [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 880.567227] env[61978]: DEBUG nova.virt.hardware [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 
tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 880.567329] env[61978]: DEBUG nova.virt.hardware [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 880.567475] env[61978]: DEBUG nova.virt.hardware [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 880.567696] env[61978]: DEBUG nova.virt.hardware [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 880.567853] env[61978]: DEBUG nova.virt.hardware [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 880.568044] env[61978]: DEBUG nova.virt.hardware [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 880.568218] env[61978]: DEBUG nova.virt.hardware [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 880.568389] env[61978]: DEBUG nova.virt.hardware [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 880.569279] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Releasing lock "refresh_cache-96a38ed0-c880-4f21-9389-99f039279072" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.569675] env[61978]: DEBUG nova.compute.manager [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Start destroying the instance on the 
hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 880.570012] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 880.571132] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24edd949-a33b-4088-85d6-d8701651dfba {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.576185] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c657aab2-3ef9-43a8-ab90-4ff05245e305 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.588280] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79a233d-cb4e-4abe-8b42-ae225e2b6594 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.599666] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 880.600140] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a615069e-fed7-45c4-a365-fffb1073a87d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.616739] env[61978]: DEBUG oslo_vmware.api [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 880.616739] env[61978]: value = "task-1394697" [ 880.616739] env[61978]: _type = "Task" [ 880.616739] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.628833] env[61978]: DEBUG oslo_vmware.api [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394697, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.714344] env[61978]: DEBUG oslo_concurrency.lockutils [req-57a5b558-2125-437d-87e9-15c8e4f408e0 req-6d6991cb-555e-4a8b-95c0-8a1e09255195 service nova] Releasing lock "refresh_cache-371ddf66-a39b-41c4-bbd1-2a1c1b99834e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.939829] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cbc42b-a077-25b2-e337-eda0ec1cd05d, 'name': SearchDatastore_Task, 'duration_secs': 0.030756} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.940183] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.940443] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 880.940746] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.940850] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.941067] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 880.941400] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-532050e2-d6a7-4aed-b757-ac2c1dfcec38 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.953688] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 880.953817] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 880.954843] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b00432b6-6d35-410f-bacd-2280ab412b0a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.962080] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for the task: (returnval){ [ 880.962080] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]520ac244-836d-f353-1aa9-c2f235d9fcef" [ 880.962080] env[61978]: _type = "Task" [ 880.962080] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.972914] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520ac244-836d-f353-1aa9-c2f235d9fcef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.032839] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquiring lock "refresh_cache-96bef3f3-a45c-43ba-a86a-66c1d5686ea6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.033098] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquired lock "refresh_cache-96bef3f3-a45c-43ba-a86a-66c1d5686ea6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.033316] env[61978]: DEBUG nova.network.neutron [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 881.077165] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.598s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.078069] env[61978]: DEBUG nova.compute.manager [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 881.080620] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.201s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.081027] env[61978]: DEBUG nova.objects.instance [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Lazy-loading 'resources' on Instance uuid 5d48e854-45fd-4767-91b7-100f84bdca55 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 881.129396] env[61978]: DEBUG oslo_vmware.api [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394697, 'name': PowerOffVM_Task, 'duration_secs': 0.318005} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.129817] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 881.132025] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 881.132025] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c0fafc72-1e1e-498b-a6b7-c2b0f689ec43 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.164375] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 881.164633] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 881.165386] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Deleting the datastore file [datastore2] 96a38ed0-c880-4f21-9389-99f039279072 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 881.165386] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ea529658-8c72-4750-ac37-0bc511a1a7ee {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.178943] env[61978]: DEBUG oslo_vmware.api [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for the task: (returnval){ [ 881.178943] env[61978]: value = "task-1394699" [ 881.178943] env[61978]: _type = "Task" [ 881.178943] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.187367] env[61978]: DEBUG oslo_vmware.api [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394699, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.473147] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520ac244-836d-f353-1aa9-c2f235d9fcef, 'name': SearchDatastore_Task, 'duration_secs': 0.014565} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.474272] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2113eb89-e514-4d95-afdf-de0582c394fc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.479877] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for the task: (returnval){ [ 881.479877] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52dabd42-121f-07f4-4b4e-841638513152" [ 881.479877] env[61978]: _type = "Task" [ 881.479877] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.489347] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52dabd42-121f-07f4-4b4e-841638513152, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.584109] env[61978]: DEBUG nova.compute.manager [req-5e165998-762e-471e-b74f-905ad947e34a req-101964fb-01e6-4dc3-ad8c-aa364c048aea service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Received event network-changed-a5290cfd-6d88-4c49-a54c-626d4c4843bd {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 881.584392] env[61978]: DEBUG nova.compute.manager [req-5e165998-762e-471e-b74f-905ad947e34a req-101964fb-01e6-4dc3-ad8c-aa364c048aea service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Refreshing instance network info cache due to event network-changed-a5290cfd-6d88-4c49-a54c-626d4c4843bd. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 881.584612] env[61978]: DEBUG oslo_concurrency.lockutils [req-5e165998-762e-471e-b74f-905ad947e34a req-101964fb-01e6-4dc3-ad8c-aa364c048aea service nova] Acquiring lock "refresh_cache-9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.584799] env[61978]: DEBUG oslo_concurrency.lockutils [req-5e165998-762e-471e-b74f-905ad947e34a req-101964fb-01e6-4dc3-ad8c-aa364c048aea service nova] Acquired lock "refresh_cache-9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.584944] env[61978]: DEBUG nova.network.neutron [req-5e165998-762e-471e-b74f-905ad947e34a req-101964fb-01e6-4dc3-ad8c-aa364c048aea service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Refreshing network info cache for port a5290cfd-6d88-4c49-a54c-626d4c4843bd {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 881.590555] env[61978]: DEBUG nova.compute.utils [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 881.592556] env[61978]: DEBUG nova.network.neutron [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 881.594874] env[61978]: DEBUG nova.compute.manager [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 881.595011] env[61978]: DEBUG nova.network.neutron [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 881.693236] env[61978]: DEBUG oslo_vmware.api [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Task: {'id': task-1394699, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.310345} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.693580] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 881.693785] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 881.694383] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 881.694574] env[61978]: INFO nova.compute.manager [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Took 1.12 seconds to destroy the instance on the hypervisor. [ 881.694812] env[61978]: DEBUG oslo.service.loopingcall [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 881.695010] env[61978]: DEBUG nova.compute.manager [-] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 881.695113] env[61978]: DEBUG nova.network.neutron [-] [instance: 96a38ed0-c880-4f21-9389-99f039279072] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 881.720567] env[61978]: DEBUG nova.network.neutron [-] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 881.746165] env[61978]: DEBUG nova.policy [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9710d9ea81924b3a8d8315fcacf94c02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82ec8701f6504322a0d00feb6c15f0be', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 881.992113] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52dabd42-121f-07f4-4b4e-841638513152, 'name': SearchDatastore_Task, 'duration_secs': 0.068513} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.995022] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.995713] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] d2614f71-3026-41d4-ae04-eaede9b5ead5/d2614f71-3026-41d4-ae04-eaede9b5ead5.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 881.995997] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab82176b-15a8-4b1f-8e07-1191fe70879c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.007204] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for the task: (returnval){ [ 882.007204] env[61978]: value = "task-1394701" [ 882.007204] env[61978]: _type = "Task" [ 882.007204] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.014798] env[61978]: DEBUG nova.network.neutron [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Updating instance_info_cache with network_info: [{"id": "ebb15ef6-0310-4f67-8247-f09f03d452db", "address": "fa:16:3e:e2:a7:be", "network": {"id": "140282e9-127a-4f19-b6d1-6bea55474c67", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-904021562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a3471435d4747648cd8ddf0817d9b85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebb15ef6-03", "ovs_interfaceid": "ebb15ef6-0310-4f67-8247-f09f03d452db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.021771] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394701, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.026503] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cc0c3ec-a2a2-4b88-8f05-ca9beddb525d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.037417] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8585f5c5-ac98-4915-b08a-30b2ac3f9ac6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.074717] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da598b4b-406e-4138-994b-95139338ea96 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.083907] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25c388e-e2d3-4967-9f1e-2696fbae7120 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.101278] env[61978]: DEBUG nova.compute.provider_tree [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.103340] env[61978]: DEBUG nova.compute.manager [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 882.224187] env[61978]: DEBUG nova.network.neutron [-] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.467179] env[61978]: DEBUG nova.network.neutron [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Successfully updated port: 51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 882.520262] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Releasing lock "refresh_cache-96bef3f3-a45c-43ba-a86a-66c1d5686ea6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.520785] env[61978]: DEBUG nova.compute.manager [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Instance network_info: |[{"id": "ebb15ef6-0310-4f67-8247-f09f03d452db", "address": "fa:16:3e:e2:a7:be", "network": {"id": "140282e9-127a-4f19-b6d1-6bea55474c67", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-904021562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a3471435d4747648cd8ddf0817d9b85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebb15ef6-03", "ovs_interfaceid": "ebb15ef6-0310-4f67-8247-f09f03d452db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 882.523018] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394701, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.523018] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:a7:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eed34ae1-5f7f-4deb-9db8-85eaa1e60c29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ebb15ef6-0310-4f67-8247-f09f03d452db', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 882.532356] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Creating folder: Project (4a3471435d4747648cd8ddf0817d9b85). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 882.535996] env[61978]: DEBUG nova.network.neutron [req-5e165998-762e-471e-b74f-905ad947e34a req-101964fb-01e6-4dc3-ad8c-aa364c048aea service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Updated VIF entry in instance network info cache for port a5290cfd-6d88-4c49-a54c-626d4c4843bd. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 882.535996] env[61978]: DEBUG nova.network.neutron [req-5e165998-762e-471e-b74f-905ad947e34a req-101964fb-01e6-4dc3-ad8c-aa364c048aea service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Updating instance_info_cache with network_info: [{"id": "a5290cfd-6d88-4c49-a54c-626d4c4843bd", "address": "fa:16:3e:ed:43:9e", "network": {"id": "4b7af29e-cc4a-4765-8d05-6adf88573ad3", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1006801081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00c674bbf1e945ba946d844f9856fdfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5290cfd-6d", "ovs_interfaceid": "a5290cfd-6d88-4c49-a54c-626d4c4843bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.536304] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a6200a55-81b7-4911-ace3-a8aca0f12b80 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.551021] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Created folder: Project 
(4a3471435d4747648cd8ddf0817d9b85) in parent group-v295764. [ 882.551021] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Creating folder: Instances. Parent ref: group-v295816. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 882.551021] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-76df1a35-3778-475d-85c5-26b67f667709 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.564025] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Created folder: Instances in parent group-v295816. [ 882.564025] env[61978]: DEBUG oslo.service.loopingcall [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 882.564025] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 882.564025] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f279c472-cd85-4cf3-af78-9713eaf4d28a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.589021] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 882.589021] env[61978]: value = "task-1394704" [ 882.589021] env[61978]: _type = "Task" [ 882.589021] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.597768] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394704, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.611111] env[61978]: DEBUG nova.scheduler.client.report [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 882.727421] env[61978]: INFO nova.compute.manager [-] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Took 1.03 seconds to deallocate network for instance. 
[ 882.795884] env[61978]: DEBUG nova.compute.manager [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Received event network-vif-plugged-f98d4797-3791-4132-9a71-1b520fa8e5e8 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 882.796304] env[61978]: DEBUG oslo_concurrency.lockutils [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] Acquiring lock "d2614f71-3026-41d4-ae04-eaede9b5ead5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.796799] env[61978]: DEBUG oslo_concurrency.lockutils [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] Lock "d2614f71-3026-41d4-ae04-eaede9b5ead5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.798527] env[61978]: DEBUG oslo_concurrency.lockutils [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] Lock "d2614f71-3026-41d4-ae04-eaede9b5ead5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.798527] env[61978]: DEBUG nova.compute.manager [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] No waiting events found dispatching network-vif-plugged-f98d4797-3791-4132-9a71-1b520fa8e5e8 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 882.798527] env[61978]: WARNING nova.compute.manager [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Received unexpected event network-vif-plugged-f98d4797-3791-4132-9a71-1b520fa8e5e8 for instance with vm_state building and task_state spawning. [ 882.798527] env[61978]: DEBUG nova.compute.manager [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Received event network-changed-f98d4797-3791-4132-9a71-1b520fa8e5e8 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 882.798998] env[61978]: DEBUG nova.compute.manager [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Refreshing instance network info cache due to event network-changed-f98d4797-3791-4132-9a71-1b520fa8e5e8. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 882.803333] env[61978]: DEBUG oslo_concurrency.lockutils [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] Acquiring lock "refresh_cache-d2614f71-3026-41d4-ae04-eaede9b5ead5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.803333] env[61978]: DEBUG oslo_concurrency.lockutils [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] Acquired lock "refresh_cache-d2614f71-3026-41d4-ae04-eaede9b5ead5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.803333] env[61978]: DEBUG nova.network.neutron [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Refreshing network info cache for port f98d4797-3791-4132-9a71-1b520fa8e5e8 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 882.838764] env[61978]: DEBUG nova.network.neutron [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Successfully created port: 2a7c8ac4-8bf9-436b-97e6-d95c0b1486ee {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 882.970986] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Acquiring lock "refresh_cache-2f5b06f6-7178-4fdf-93b6-65477f020898" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.971155] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Acquired lock "refresh_cache-2f5b06f6-7178-4fdf-93b6-65477f020898" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.971311] env[61978]: DEBUG nova.network.neutron [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 883.022449] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394701, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.040633] env[61978]: DEBUG oslo_concurrency.lockutils [req-5e165998-762e-471e-b74f-905ad947e34a req-101964fb-01e6-4dc3-ad8c-aa364c048aea service nova] Releasing lock "refresh_cache-9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.100631] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394704, 'name': CreateVM_Task, 'duration_secs': 0.369219} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.100996] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 883.101927] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.102307] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.103809] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 883.103809] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60e6f9c7-4e41-440b-9202-46c517459d26 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.115184] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 883.115184] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5278936b-7c00-0534-6f2e-fc94750489b4" [ 883.115184] env[61978]: _type = "Task" [ 883.115184] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.117842] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.037s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.121471] env[61978]: DEBUG nova.compute.manager [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 883.124177] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 15.269s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.135818] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5278936b-7c00-0534-6f2e-fc94750489b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.168259] env[61978]: DEBUG nova.virt.hardware [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 883.168483] env[61978]: DEBUG nova.virt.hardware [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 883.168645] env[61978]: DEBUG nova.virt.hardware [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 883.168810] env[61978]: DEBUG nova.virt.hardware [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 
tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 883.168959] env[61978]: DEBUG nova.virt.hardware [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 883.169122] env[61978]: DEBUG nova.virt.hardware [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 883.169334] env[61978]: DEBUG nova.virt.hardware [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 883.169924] env[61978]: DEBUG nova.virt.hardware [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 883.169924] env[61978]: DEBUG nova.virt.hardware [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 883.169924] env[61978]: DEBUG nova.virt.hardware [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 883.170080] env[61978]: DEBUG nova.virt.hardware [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 883.171264] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf20ffc-cb9f-41ff-a2e9-5a2c6ed7c09f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.176365] env[61978]: INFO nova.scheduler.client.report [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Deleted allocations for instance 5d48e854-45fd-4767-91b7-100f84bdca55 [ 883.189057] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b69c09ce-cd03-40d9-9141-e43dc6c54b7a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.236586] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 
tempest-ServersAdmin275Test-442011889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.521678] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394701, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.086434} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.521990] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] d2614f71-3026-41d4-ae04-eaede9b5ead5/d2614f71-3026-41d4-ae04-eaede9b5ead5.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 883.522187] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 883.522440] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fa9a3a04-a3d6-4232-b792-cadbd248cc3c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.532176] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for the task: (returnval){ [ 883.532176] env[61978]: value = "task-1394705" [ 883.532176] env[61978]: _type = "Task" [ 883.532176] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.544437] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394705, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.558042] env[61978]: DEBUG nova.network.neutron [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 883.625809] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5278936b-7c00-0534-6f2e-fc94750489b4, 'name': SearchDatastore_Task, 'duration_secs': 0.039841} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.628193] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.629980] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 883.631021] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.631336] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.631532] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 883.634832] env[61978]: INFO nova.compute.claims [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 883.642460] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b3550c7-1b24-4860-b63c-3fd323b3264b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.658315] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 883.658511] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 883.659310] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2ae40a9-38fb-4fab-a7ba-5ffad9b76a75 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.669967] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 883.669967] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]528176b6-fa53-ea22-bf6b-f63ef49b9cf0" [ 883.669967] env[61978]: _type = "Task" [ 883.669967] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.682780] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528176b6-fa53-ea22-bf6b-f63ef49b9cf0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.690202] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ac519c4b-1dce-48d4-bbf9-da679853fb52 tempest-ServerDiagnosticsTest-1716779549 tempest-ServerDiagnosticsTest-1716779549-project-member] Lock "5d48e854-45fd-4767-91b7-100f84bdca55" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.063s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.853729] env[61978]: DEBUG nova.network.neutron [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Updated VIF entry in instance network info cache for port f98d4797-3791-4132-9a71-1b520fa8e5e8. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 883.854130] env[61978]: DEBUG nova.network.neutron [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Updating instance_info_cache with network_info: [{"id": "f98d4797-3791-4132-9a71-1b520fa8e5e8", "address": "fa:16:3e:fa:11:fe", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.249", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf98d4797-37", "ovs_interfaceid": "f98d4797-3791-4132-9a71-1b520fa8e5e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.905788] env[61978]: DEBUG nova.network.neutron [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Updating instance_info_cache with network_info: [{"id": "51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f", "address": "fa:16:3e:7d:82:2a", "network": {"id": "0d7f3ee4-9789-4ca3-802e-627a975ec68f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-286598400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8499568efa184c35a99e2959f04273fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51cc84cf-63", "ovs_interfaceid": "51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.044694] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394705, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067074} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.045059] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 884.045852] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ffa020e-033b-4632-9bf8-65a5ed8082fc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.075128] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] d2614f71-3026-41d4-ae04-eaede9b5ead5/d2614f71-3026-41d4-ae04-eaede9b5ead5.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 884.075458] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e92b4813-d61a-4640-acc3-58e4bad8ef66 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.103218] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for the task: (returnval){ [ 884.103218] env[61978]: value = "task-1394706" [ 884.103218] env[61978]: _type = "Task" [ 884.103218] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.114051] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394706, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.141161] env[61978]: INFO nova.compute.resource_tracker [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Updating resource usage from migration a5c81d5c-f908-418a-92b4-c9cc3e2002af [ 884.184213] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528176b6-fa53-ea22-bf6b-f63ef49b9cf0, 'name': SearchDatastore_Task, 'duration_secs': 0.023956} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.187258] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1965390f-1de4-4b56-b813-15c200912a9d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.198475] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 884.198475] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52dbd289-76fd-120c-17b4-20dcf6fa0acb" [ 884.198475] env[61978]: _type = "Task" [ 884.198475] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.219473] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52dbd289-76fd-120c-17b4-20dcf6fa0acb, 'name': SearchDatastore_Task, 'duration_secs': 0.013621} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.220175] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.221545] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 96bef3f3-a45c-43ba-a86a-66c1d5686ea6/96bef3f3-a45c-43ba-a86a-66c1d5686ea6.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 884.221935] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d70207d-2861-45f1-add5-3730edc5a405 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.236736] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 884.236736] env[61978]: value = "task-1394707" [ 884.236736] env[61978]: _type = "Task" [ 884.236736] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.252808] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1394707, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.357487] env[61978]: DEBUG oslo_concurrency.lockutils [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] Releasing lock "refresh_cache-d2614f71-3026-41d4-ae04-eaede9b5ead5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.357751] env[61978]: DEBUG nova.compute.manager [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Received event network-vif-plugged-ebb15ef6-0310-4f67-8247-f09f03d452db {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 884.358059] env[61978]: DEBUG oslo_concurrency.lockutils [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] Acquiring lock "96bef3f3-a45c-43ba-a86a-66c1d5686ea6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.358175] env[61978]: DEBUG oslo_concurrency.lockutils [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] Lock "96bef3f3-a45c-43ba-a86a-66c1d5686ea6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.358333] env[61978]: DEBUG oslo_concurrency.lockutils [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] Lock "96bef3f3-a45c-43ba-a86a-66c1d5686ea6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.358495] env[61978]: DEBUG nova.compute.manager [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] No waiting events found dispatching network-vif-plugged-ebb15ef6-0310-4f67-8247-f09f03d452db {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 884.358656] env[61978]: WARNING nova.compute.manager [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Received unexpected event network-vif-plugged-ebb15ef6-0310-4f67-8247-f09f03d452db for instance with vm_state building and task_state spawning. [ 884.358813] env[61978]: DEBUG nova.compute.manager [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Received event network-changed-ebb15ef6-0310-4f67-8247-f09f03d452db {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 884.358965] env[61978]: DEBUG nova.compute.manager [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Refreshing instance network info cache due to event network-changed-ebb15ef6-0310-4f67-8247-f09f03d452db. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 884.359188] env[61978]: DEBUG oslo_concurrency.lockutils [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] Acquiring lock "refresh_cache-96bef3f3-a45c-43ba-a86a-66c1d5686ea6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.360329] env[61978]: DEBUG oslo_concurrency.lockutils [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] Acquired lock "refresh_cache-96bef3f3-a45c-43ba-a86a-66c1d5686ea6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.360329] env[61978]: DEBUG nova.network.neutron [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Refreshing network info cache for port ebb15ef6-0310-4f67-8247-f09f03d452db {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 884.411359] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Releasing lock "refresh_cache-2f5b06f6-7178-4fdf-93b6-65477f020898" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.411686] env[61978]: DEBUG nova.compute.manager [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Instance network_info: |[{"id": "51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f", "address": "fa:16:3e:7d:82:2a", "network": {"id": "0d7f3ee4-9789-4ca3-802e-627a975ec68f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-286598400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8499568efa184c35a99e2959f04273fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51cc84cf-63", "ovs_interfaceid": "51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 884.415143] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:82:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'78340140-126f-4ef8-a340-debaa64da3e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 884.421381] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Creating folder: Project (8499568efa184c35a99e2959f04273fc). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 884.421381] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-278d49f1-1f28-4f78-a2ae-24c838e66a9d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.435503] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Created folder: Project (8499568efa184c35a99e2959f04273fc) in parent group-v295764. [ 884.435920] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Creating folder: Instances. Parent ref: group-v295819. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 884.436363] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17149347-6854-4093-a1fe-02a06573c056 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.454474] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Created folder: Instances in parent group-v295819. [ 884.454474] env[61978]: DEBUG oslo.service.loopingcall [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 884.454474] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 884.454474] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-10c6c4d2-d39c-43b2-81bc-69f6bec0cd84 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.486707] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 884.486707] env[61978]: value = "task-1394710" [ 884.486707] env[61978]: _type = "Task" [ 884.486707] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.497522] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394710, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.625180] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394706, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.685863] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcfc79f5-6464-488a-81c2-e6de8cb1df44 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.697034] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f675d19-e2ac-4303-8ca2-6eb87f0914be {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.736477] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5787f2ca-5a34-4c2b-8fa8-6003e6892ac6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.753032] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb66005-e03f-41e9-aa8d-0edc753a7368 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.758854] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1394707, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.772382] env[61978]: DEBUG nova.compute.provider_tree [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 885.004634] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394710, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.120765] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394706, 'name': ReconfigVM_Task, 'duration_secs': 0.698134} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.121290] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Reconfigured VM instance instance-00000011 to attach disk [datastore1] d2614f71-3026-41d4-ae04-eaede9b5ead5/d2614f71-3026-41d4-ae04-eaede9b5ead5.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 885.121782] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8bb86088-cc08-4934-97a6-3a96e338d40a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.131579] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for the task: (returnval){ [ 885.131579] env[61978]: value = "task-1394711" [ 885.131579] env[61978]: _type = "Task" [ 885.131579] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.147443] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394711, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.228823] env[61978]: DEBUG nova.network.neutron [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Updated VIF entry in instance network info cache for port ebb15ef6-0310-4f67-8247-f09f03d452db. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 885.228823] env[61978]: DEBUG nova.network.neutron [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Updating instance_info_cache with network_info: [{"id": "ebb15ef6-0310-4f67-8247-f09f03d452db", "address": "fa:16:3e:e2:a7:be", "network": {"id": "140282e9-127a-4f19-b6d1-6bea55474c67", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-904021562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a3471435d4747648cd8ddf0817d9b85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebb15ef6-03", "ovs_interfaceid": "ebb15ef6-0310-4f67-8247-f09f03d452db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.258806] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1394707, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.884803} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.261669] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 96bef3f3-a45c-43ba-a86a-66c1d5686ea6/96bef3f3-a45c-43ba-a86a-66c1d5686ea6.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 885.261669] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 885.261669] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5c8848eb-11d0-4303-950e-7332abf6692f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.269074] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 885.269074] env[61978]: value = "task-1394712" [ 885.269074] env[61978]: _type = "Task" [ 885.269074] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.276527] env[61978]: DEBUG nova.scheduler.client.report [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 885.284692] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1394712, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.502531] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394710, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.582393] env[61978]: DEBUG nova.compute.manager [req-4c30b363-a300-4212-ae5a-2cf1539a7dd6 req-38de744d-b26d-4d1b-b597-4eaeaa33e734 service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Received event network-changed-a5290cfd-6d88-4c49-a54c-626d4c4843bd {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 885.582393] env[61978]: DEBUG nova.compute.manager [req-4c30b363-a300-4212-ae5a-2cf1539a7dd6 req-38de744d-b26d-4d1b-b597-4eaeaa33e734 service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Refreshing instance network info cache due to event network-changed-a5290cfd-6d88-4c49-a54c-626d4c4843bd. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 885.582393] env[61978]: DEBUG oslo_concurrency.lockutils [req-4c30b363-a300-4212-ae5a-2cf1539a7dd6 req-38de744d-b26d-4d1b-b597-4eaeaa33e734 service nova] Acquiring lock "refresh_cache-9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.582393] env[61978]: DEBUG oslo_concurrency.lockutils [req-4c30b363-a300-4212-ae5a-2cf1539a7dd6 req-38de744d-b26d-4d1b-b597-4eaeaa33e734 service nova] Acquired lock "refresh_cache-9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.582393] env[61978]: DEBUG nova.network.neutron [req-4c30b363-a300-4212-ae5a-2cf1539a7dd6 req-38de744d-b26d-4d1b-b597-4eaeaa33e734 service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Refreshing network info cache for port a5290cfd-6d88-4c49-a54c-626d4c4843bd {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 885.644582] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394711, 'name': Rename_Task, 'duration_secs': 0.300426} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.644996] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 885.645514] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed71934b-83f6-4436-9de4-cf888deac89a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.654619] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for the task: (returnval){ [ 885.654619] env[61978]: value = "task-1394713" [ 885.654619] env[61978]: _type = "Task" [ 885.654619] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.666738] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394713, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.729993] env[61978]: DEBUG oslo_concurrency.lockutils [req-1d300c65-0b72-42fd-bd9a-d4a55e0898aa req-839e6e20-45fd-4706-a987-fea40860b358 service nova] Releasing lock "refresh_cache-96bef3f3-a45c-43ba-a86a-66c1d5686ea6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.784216] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1394712, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.460677} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.784764] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 885.785658] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-143bdc49-3e02-4404-adb6-82d3074f1e3d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.790376] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.666s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.790376] env[61978]: INFO nova.compute.manager [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Migrating [ 885.791953] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.791953] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquired lock "compute-rpcapi-router" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.793254] env[61978]: DEBUG oslo_concurrency.lockutils [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.871s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.795280] env[61978]: INFO nova.compute.claims [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 885.802039] env[61978]: INFO nova.compute.rpcapi [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 885.802039] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Releasing lock "compute-rpcapi-router" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.830594] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 96bef3f3-a45c-43ba-a86a-66c1d5686ea6/96bef3f3-a45c-43ba-a86a-66c1d5686ea6.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 885.836448] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a6517a5-e7a1-49b1-a8c8-418e30879c2d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.867918] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 885.867918] env[61978]: value = "task-1394714" [ 885.867918] env[61978]: _type = "Task" [ 885.867918] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.881641] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1394714, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.969582] env[61978]: DEBUG nova.network.neutron [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Successfully updated port: 2a7c8ac4-8bf9-436b-97e6-d95c0b1486ee {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 886.008819] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394710, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.164602] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394713, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.359127] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "refresh_cache-b26a4784-698d-477a-8db7-58156899d231" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.359402] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquired lock "refresh_cache-b26a4784-698d-477a-8db7-58156899d231" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.359659] env[61978]: DEBUG nova.network.neutron [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 886.380083] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1394714, 'name': ReconfigVM_Task, 'duration_secs': 0.373229} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.380537] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 96bef3f3-a45c-43ba-a86a-66c1d5686ea6/96bef3f3-a45c-43ba-a86a-66c1d5686ea6.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 886.381276] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-86da1759-c533-44ec-9189-535f5ed93359 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.389417] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 886.389417] env[61978]: value = "task-1394715" [ 886.389417] env[61978]: _type = "Task" [ 886.389417] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.401178] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1394715, 'name': Rename_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.474840] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Acquiring lock "refresh_cache-a4d45835-f065-445f-bcb6-d1b01d545cb0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.475225] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Acquired lock "refresh_cache-a4d45835-f065-445f-bcb6-d1b01d545cb0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.475998] env[61978]: DEBUG nova.network.neutron [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 886.504846] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394710, 'name': CreateVM_Task, 'duration_secs': 1.60419} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.505263] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 886.507296] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.511990] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.511990] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 886.511990] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b32d554-8b44-45ec-96dd-50056efe6cf3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.521024] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Waiting for the task: (returnval){ [ 886.521024] 
env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e014bf-d58c-471c-8f38-524f559e4196" [ 886.521024] env[61978]: _type = "Task" [ 886.521024] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.530180] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e014bf-d58c-471c-8f38-524f559e4196, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.668677] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394713, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.713568] env[61978]: DEBUG nova.compute.manager [req-14566b51-984a-47ff-93c2-7d161b92f062 req-077b4831-66e7-4d63-8557-1dc7c9704713 service nova] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Received event network-vif-plugged-51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 886.713568] env[61978]: DEBUG oslo_concurrency.lockutils [req-14566b51-984a-47ff-93c2-7d161b92f062 req-077b4831-66e7-4d63-8557-1dc7c9704713 service nova] Acquiring lock "2f5b06f6-7178-4fdf-93b6-65477f020898-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.713568] env[61978]: DEBUG oslo_concurrency.lockutils [req-14566b51-984a-47ff-93c2-7d161b92f062 req-077b4831-66e7-4d63-8557-1dc7c9704713 service nova] Lock "2f5b06f6-7178-4fdf-93b6-65477f020898-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.713568] env[61978]: DEBUG oslo_concurrency.lockutils [req-14566b51-984a-47ff-93c2-7d161b92f062 req-077b4831-66e7-4d63-8557-1dc7c9704713 service nova] Lock "2f5b06f6-7178-4fdf-93b6-65477f020898-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.713568] env[61978]: DEBUG nova.compute.manager [req-14566b51-984a-47ff-93c2-7d161b92f062 req-077b4831-66e7-4d63-8557-1dc7c9704713 service nova] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] No waiting events found dispatching network-vif-plugged-51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 886.713757] env[61978]: WARNING nova.compute.manager [req-14566b51-984a-47ff-93c2-7d161b92f062 req-077b4831-66e7-4d63-8557-1dc7c9704713 service nova] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Received unexpected event network-vif-plugged-51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f for instance with vm_state building and task_state spawning. 
[ 886.713757] env[61978]: DEBUG nova.compute.manager [req-14566b51-984a-47ff-93c2-7d161b92f062 req-077b4831-66e7-4d63-8557-1dc7c9704713 service nova] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Received event network-changed-51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 886.713757] env[61978]: DEBUG nova.compute.manager [req-14566b51-984a-47ff-93c2-7d161b92f062 req-077b4831-66e7-4d63-8557-1dc7c9704713 service nova] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Refreshing instance network info cache due to event network-changed-51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 886.713914] env[61978]: DEBUG oslo_concurrency.lockutils [req-14566b51-984a-47ff-93c2-7d161b92f062 req-077b4831-66e7-4d63-8557-1dc7c9704713 service nova] Acquiring lock "refresh_cache-2f5b06f6-7178-4fdf-93b6-65477f020898" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.714113] env[61978]: DEBUG oslo_concurrency.lockutils [req-14566b51-984a-47ff-93c2-7d161b92f062 req-077b4831-66e7-4d63-8557-1dc7c9704713 service nova] Acquired lock "refresh_cache-2f5b06f6-7178-4fdf-93b6-65477f020898" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.714301] env[61978]: DEBUG nova.network.neutron [req-14566b51-984a-47ff-93c2-7d161b92f062 req-077b4831-66e7-4d63-8557-1dc7c9704713 service nova] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Refreshing network info cache for port 51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 886.826021] env[61978]: DEBUG nova.network.neutron [req-4c30b363-a300-4212-ae5a-2cf1539a7dd6 req-38de744d-b26d-4d1b-b597-4eaeaa33e734 service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Updated VIF entry in instance network info cache for port a5290cfd-6d88-4c49-a54c-626d4c4843bd. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 886.826021] env[61978]: DEBUG nova.network.neutron [req-4c30b363-a300-4212-ae5a-2cf1539a7dd6 req-38de744d-b26d-4d1b-b597-4eaeaa33e734 service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Updating instance_info_cache with network_info: [{"id": "a5290cfd-6d88-4c49-a54c-626d4c4843bd", "address": "fa:16:3e:ed:43:9e", "network": {"id": "4b7af29e-cc4a-4765-8d05-6adf88573ad3", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1006801081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00c674bbf1e945ba946d844f9856fdfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5290cfd-6d", "ovs_interfaceid": "a5290cfd-6d88-4c49-a54c-626d4c4843bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.912190] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1394715, 'name': Rename_Task, 'duration_secs': 0.155401} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.912498] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 886.912751] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe2f2080-ba80-40da-a4e4-e889cdcf5c69 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.923038] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 886.923038] env[61978]: value = "task-1394716" [ 886.923038] env[61978]: _type = "Task" [ 886.923038] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.932547] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1394716, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.038125] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e014bf-d58c-471c-8f38-524f559e4196, 'name': SearchDatastore_Task, 'duration_secs': 0.034206} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.039433] env[61978]: DEBUG nova.network.neutron [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 887.042733] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.042980] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 887.043265] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 887.043463] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.043685] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 887.043992] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc14e5f9-4c83-4e8c-973c-d736ce58fdb8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.133461] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 
tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 887.133761] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 887.134729] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b13a8d14-cfa5-43ec-bde5-aa83db13541e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.142656] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Waiting for the task: (returnval){ [ 887.142656] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]529a593c-f5ef-781c-4461-fc8f1b627a71" [ 887.142656] env[61978]: _type = "Task" [ 887.142656] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.158051] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]529a593c-f5ef-781c-4461-fc8f1b627a71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.171356] env[61978]: DEBUG oslo_vmware.api [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394713, 'name': PowerOnVM_Task, 'duration_secs': 1.214428} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.171678] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 887.172100] env[61978]: INFO nova.compute.manager [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Took 12.77 seconds to spawn the instance on the hypervisor. 
[ 887.172275] env[61978]: DEBUG nova.compute.manager [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 887.174467] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dea0bcd-5f68-4137-8ebb-a90a79e8023b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.275738] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa56724f-d0cf-4434-aa15-b189997fb65c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.286682] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b7ef02-8f92-4440-b315-7aedd129fd4f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.330037] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18779899-7546-4165-87ee-9c2c99c19782 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.333544] env[61978]: DEBUG oslo_concurrency.lockutils [req-4c30b363-a300-4212-ae5a-2cf1539a7dd6 req-38de744d-b26d-4d1b-b597-4eaeaa33e734 service nova] Releasing lock "refresh_cache-9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.333792] env[61978]: DEBUG nova.compute.manager [req-4c30b363-a300-4212-ae5a-2cf1539a7dd6 req-38de744d-b26d-4d1b-b597-4eaeaa33e734 service nova] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Received event network-changed-51f0ce4a-1710-4256-9ca8-ac173927565b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 887.334020] env[61978]: DEBUG nova.compute.manager [req-4c30b363-a300-4212-ae5a-2cf1539a7dd6 req-38de744d-b26d-4d1b-b597-4eaeaa33e734 service nova] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Refreshing instance network info cache due to event network-changed-51f0ce4a-1710-4256-9ca8-ac173927565b. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 887.334249] env[61978]: DEBUG oslo_concurrency.lockutils [req-4c30b363-a300-4212-ae5a-2cf1539a7dd6 req-38de744d-b26d-4d1b-b597-4eaeaa33e734 service nova] Acquiring lock "refresh_cache-a0762952-2afd-448a-8e46-ba788a4ca131" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 887.334394] env[61978]: DEBUG oslo_concurrency.lockutils [req-4c30b363-a300-4212-ae5a-2cf1539a7dd6 req-38de744d-b26d-4d1b-b597-4eaeaa33e734 service nova] Acquired lock "refresh_cache-a0762952-2afd-448a-8e46-ba788a4ca131" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.334664] env[61978]: DEBUG nova.network.neutron [req-4c30b363-a300-4212-ae5a-2cf1539a7dd6 req-38de744d-b26d-4d1b-b597-4eaeaa33e734 service nova] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Refreshing network info cache for port 51f0ce4a-1710-4256-9ca8-ac173927565b {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 887.343371] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c613888a-46d5-4730-9723-c269b3846622 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.365861] env[61978]: DEBUG nova.compute.provider_tree [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 887.451298] env[61978]: DEBUG oslo_vmware.api [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1394716, 'name': PowerOnVM_Task, 'duration_secs': 0.494964} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.451566] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 887.451771] env[61978]: INFO nova.compute.manager [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Took 9.45 seconds to spawn the instance on the hypervisor. 
[ 887.451975] env[61978]: DEBUG nova.compute.manager [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 887.452795] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf0e970-a32f-4894-9cfd-b9f726ff730a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.655016] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]529a593c-f5ef-781c-4461-fc8f1b627a71, 'name': SearchDatastore_Task, 'duration_secs': 0.290706} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.655639] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edcf5071-9341-4a69-8c94-701f619ea122 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.661498] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Waiting for the task: (returnval){ [ 887.661498] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52559e3f-51d9-8b0f-60ff-defef457146e" [ 887.661498] env[61978]: _type = "Task" [ 887.661498] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.673325] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52559e3f-51d9-8b0f-60ff-defef457146e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.681230] env[61978]: DEBUG nova.network.neutron [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Updating instance_info_cache with network_info: [{"id": "2a7c8ac4-8bf9-436b-97e6-d95c0b1486ee", "address": "fa:16:3e:15:3e:22", "network": {"id": "673a2a81-5be3-46b9-92e1-55c6b138a1f8", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1649618097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82ec8701f6504322a0d00feb6c15f0be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c42bb08a-77b4-4bba-8166-702cbb1b5f1e", "external-id": "nsx-vlan-transportzone-137", "segmentation_id": 137, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a7c8ac4-8b", "ovs_interfaceid": "2a7c8ac4-8bf9-436b-97e6-d95c0b1486ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.710160] env[61978]: INFO nova.compute.manager [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Took 27.56 seconds to build instance. 
[ 887.760826] env[61978]: DEBUG nova.network.neutron [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Updating instance_info_cache with network_info: [{"id": "3a886f4f-5f7c-4f97-8f00-2555aebe9856", "address": "fa:16:3e:94:aa:92", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a886f4f-5f", "ovs_interfaceid": "3a886f4f-5f7c-4f97-8f00-2555aebe9856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.845995] env[61978]: DEBUG nova.network.neutron [req-14566b51-984a-47ff-93c2-7d161b92f062 req-077b4831-66e7-4d63-8557-1dc7c9704713 service nova] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Updated VIF entry in instance network info cache for port 51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 887.846524] env[61978]: DEBUG nova.network.neutron [req-14566b51-984a-47ff-93c2-7d161b92f062 req-077b4831-66e7-4d63-8557-1dc7c9704713 service nova] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Updating instance_info_cache with network_info: [{"id": "51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f", "address": "fa:16:3e:7d:82:2a", "network": {"id": "0d7f3ee4-9789-4ca3-802e-627a975ec68f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-286598400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8499568efa184c35a99e2959f04273fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51cc84cf-63", "ovs_interfaceid": "51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.897738] env[61978]: ERROR nova.scheduler.client.report [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [req-522daad2-2f04-4386-91cb-2171384414a5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 44209228-3464-48ae-bc40-83eccd44b0cf. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-522daad2-2f04-4386-91cb-2171384414a5"}]} [ 887.927308] env[61978]: DEBUG nova.scheduler.client.report [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Refreshing inventories for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 887.943735] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Acquiring lock "e30d4a9f-1d75-453c-9552-2a0fbd4aa87d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.943735] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Lock "e30d4a9f-1d75-453c-9552-2a0fbd4aa87d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.955015] env[61978]: DEBUG nova.scheduler.client.report [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Updating ProviderTree inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 887.955289] env[61978]: DEBUG nova.compute.provider_tree [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 887.976371] env[61978]: INFO nova.compute.manager [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Took 24.14 seconds to build instance. 
[ 887.977713] env[61978]: DEBUG nova.scheduler.client.report [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Refreshing aggregate associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, aggregates: None {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 888.011348] env[61978]: DEBUG nova.scheduler.client.report [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Refreshing trait associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 888.183126] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52559e3f-51d9-8b0f-60ff-defef457146e, 'name': SearchDatastore_Task, 'duration_secs': 0.43226} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.187977] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.188079] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 2f5b06f6-7178-4fdf-93b6-65477f020898/2f5b06f6-7178-4fdf-93b6-65477f020898.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 888.188534] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Releasing lock "refresh_cache-a4d45835-f065-445f-bcb6-d1b01d545cb0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.188788] env[61978]: DEBUG nova.compute.manager [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Instance network_info: |[{"id": "2a7c8ac4-8bf9-436b-97e6-d95c0b1486ee", "address": "fa:16:3e:15:3e:22", "network": {"id": "673a2a81-5be3-46b9-92e1-55c6b138a1f8", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1649618097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82ec8701f6504322a0d00feb6c15f0be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c42bb08a-77b4-4bba-8166-702cbb1b5f1e", "external-id": "nsx-vlan-transportzone-137", "segmentation_id": 137, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a7c8ac4-8b", "ovs_interfaceid": "2a7c8ac4-8bf9-436b-97e6-d95c0b1486ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 888.189509] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-67a185ba-9776-488a-8ce2-d663ca66b3f7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.191791] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:3e:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c42bb08a-77b4-4bba-8166-702cbb1b5f1e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2a7c8ac4-8bf9-436b-97e6-d95c0b1486ee', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 888.200718] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Creating folder: Project (82ec8701f6504322a0d00feb6c15f0be). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 888.202132] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a5645d6-4d25-46d9-bc50-e23206e178c4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.211790] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Waiting for the task: (returnval){ [ 888.211790] env[61978]: value = "task-1394718" [ 888.211790] env[61978]: _type = "Task" [ 888.211790] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.212460] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7fec9f5f-d040-40d1-917b-800dd19fc2ae tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Lock "d2614f71-3026-41d4-ae04-eaede9b5ead5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.552s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.220957] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Created folder: Project (82ec8701f6504322a0d00feb6c15f0be) in parent group-v295764. [ 888.221202] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Creating folder: Instances. Parent ref: group-v295822. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 888.221724] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b809a7e5-a537-4f6b-96bb-b945bb19291d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.231745] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': task-1394718, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.234867] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Created folder: Instances in parent group-v295822. [ 888.235196] env[61978]: DEBUG oslo.service.loopingcall [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 888.236325] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 888.236325] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e7f925f9-1247-449e-a342-9412cdb44ae2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.260485] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 888.260485] env[61978]: value = "task-1394720" [ 888.260485] env[61978]: _type = "Task" [ 888.260485] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.268042] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Releasing lock "refresh_cache-b26a4784-698d-477a-8db7-58156899d231" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.276615] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394720, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.354478] env[61978]: DEBUG oslo_concurrency.lockutils [req-14566b51-984a-47ff-93c2-7d161b92f062 req-077b4831-66e7-4d63-8557-1dc7c9704713 service nova] Releasing lock "refresh_cache-2f5b06f6-7178-4fdf-93b6-65477f020898" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.476704] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adeead2b-192f-4114-b8c1-d5b01b368ef6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.481329] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e714c362-47f8-413d-bff1-6d16b22819f9 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "96bef3f3-a45c-43ba-a86a-66c1d5686ea6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.433s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.490808] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48a618f8-bb22-4e22-b4b1-9f2acbe7c89a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.523673] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3e3726-f3e0-4705-af63-09b8b75dd75c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.533453] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e50250a-26f4-4ca4-bd47-d22ea752a440 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.552279] env[61978]: DEBUG nova.compute.provider_tree [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 888.655132] env[61978]: DEBUG oslo_concurrency.lockutils [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Acquiring lock 
"32bcb974-8db9-43e2-b397-b497f3a4f30c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.655763] env[61978]: DEBUG oslo_concurrency.lockutils [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Lock "32bcb974-8db9-43e2-b397-b497f3a4f30c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.722036] env[61978]: DEBUG nova.compute.manager [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 888.729743] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': task-1394718, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.779921] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394720, 'name': CreateVM_Task, 'duration_secs': 0.484044} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.780139] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 888.780862] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.781044] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.781377] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 888.781638] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a54dc230-2eb5-41ba-8821-a7e284361a9c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.788611] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 
tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Waiting for the task: (returnval){ [ 888.788611] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]522f08e9-1852-73ae-d014-dc89db26fd2a" [ 888.788611] env[61978]: _type = "Task" [ 888.788611] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.798150] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522f08e9-1852-73ae-d014-dc89db26fd2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.960141] env[61978]: DEBUG nova.network.neutron [req-4c30b363-a300-4212-ae5a-2cf1539a7dd6 req-38de744d-b26d-4d1b-b597-4eaeaa33e734 service nova] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Updated VIF entry in instance network info cache for port 51f0ce4a-1710-4256-9ca8-ac173927565b. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 888.960573] env[61978]: DEBUG nova.network.neutron [req-4c30b363-a300-4212-ae5a-2cf1539a7dd6 req-38de744d-b26d-4d1b-b597-4eaeaa33e734 service nova] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Updating instance_info_cache with network_info: [{"id": "51f0ce4a-1710-4256-9ca8-ac173927565b", "address": "fa:16:3e:8e:77:cc", "network": {"id": "4b7af29e-cc4a-4765-8d05-6adf88573ad3", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1006801081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00c674bbf1e945ba946d844f9856fdfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51f0ce4a-17", "ovs_interfaceid": "51f0ce4a-1710-4256-9ca8-ac173927565b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.092613] env[61978]: ERROR nova.scheduler.client.report [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [req-574de3b7-b0b3-4f5e-a992-ae53254f7710] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 44209228-3464-48ae-bc40-83eccd44b0cf. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-574de3b7-b0b3-4f5e-a992-ae53254f7710"}]} [ 889.093389] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Acquiring lock "081339d7-6d9b-4b66-a816-467d23196c9a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.093628] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Lock "081339d7-6d9b-4b66-a816-467d23196c9a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.094252] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Acquiring lock "081339d7-6d9b-4b66-a816-467d23196c9a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.094252] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Lock "081339d7-6d9b-4b66-a816-467d23196c9a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.094429] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Lock "081339d7-6d9b-4b66-a816-467d23196c9a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.098857] env[61978]: INFO nova.compute.manager [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Terminating instance [ 889.100742] env[61978]: DEBUG nova.compute.manager [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 889.100937] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 889.101807] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5ae076c-fac1-4119-8e2f-62912cba860b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.111790] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 889.114665] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b2ebc64a-8b3f-4906-99a9-a60fcdc328d7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.115415] env[61978]: DEBUG nova.scheduler.client.report [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Refreshing inventories for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 889.120429] env[61978]: DEBUG oslo_vmware.api [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Waiting for the task: (returnval){ [ 889.120429] env[61978]: value = "task-1394721" [ 889.120429] env[61978]: _type = "Task" [ 889.120429] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.130310] env[61978]: DEBUG oslo_vmware.api [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': task-1394721, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.131856] env[61978]: DEBUG nova.scheduler.client.report [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Updating ProviderTree inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 889.132084] env[61978]: DEBUG nova.compute.provider_tree [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 889.152340] env[61978]: DEBUG nova.scheduler.client.report [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Refreshing aggregate associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, aggregates: None {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 889.159032] env[61978]: DEBUG nova.compute.manager [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 889.178936] env[61978]: DEBUG nova.scheduler.client.report [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Refreshing trait associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 889.234431] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': task-1394718, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.687797} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.234431] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 2f5b06f6-7178-4fdf-93b6-65477f020898/2f5b06f6-7178-4fdf-93b6-65477f020898.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 889.234431] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 889.234896] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e1f03b22-0f34-47b5-8dcd-bc117c322ff1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.243478] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Waiting for the task: (returnval){ [ 889.243478] env[61978]: value = "task-1394722" [ 889.243478] env[61978]: _type = "Task" [ 889.243478] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.250904] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.258210] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': task-1394722, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.310025] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522f08e9-1852-73ae-d014-dc89db26fd2a, 'name': SearchDatastore_Task, 'duration_secs': 0.06585} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.310025] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.310025] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 889.310025] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.310759] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.310759] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 889.310759] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2d799c2-d11a-426c-a674-3c3bb21d337a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.330326] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 889.330556] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 889.331671] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3cd04fd8-a496-47aa-800b-68761383b092 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.338960] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Waiting for the task: (returnval){ [ 889.338960] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]522a5b1d-66dd-9d66-60d9-57653119a658" [ 889.338960] env[61978]: _type = "Task" [ 889.338960] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.352748] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522a5b1d-66dd-9d66-60d9-57653119a658, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.464217] env[61978]: DEBUG oslo_concurrency.lockutils [req-4c30b363-a300-4212-ae5a-2cf1539a7dd6 req-38de744d-b26d-4d1b-b597-4eaeaa33e734 service nova] Releasing lock "refresh_cache-a0762952-2afd-448a-8e46-ba788a4ca131" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.633321] env[61978]: DEBUG oslo_vmware.api [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': task-1394721, 'name': PowerOffVM_Task, 'duration_secs': 0.232989} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.633321] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 889.633321] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 889.633321] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ae7a82c2-b32b-4607-b9d5-89d0903ee9a3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.669386] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b275fa-56da-4199-b8ad-457362240a7c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.679470] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa6690a-46f5-4fe3-a525-cccd596d06a8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.728416] env[61978]: DEBUG oslo_concurrency.lockutils [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.732623] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398ec16d-c274-4ee1-97d9-d40ce1090c76 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.741313] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 889.741313] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 889.741313] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Deleting the datastore file [datastore2] 081339d7-6d9b-4b66-a816-467d23196c9a {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 889.741811] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-c8aee160-9fef-4e60-a403-da05afd741a6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.750895] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c6017b-77d9-4f63-8c6d-a89c2f8714c3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.757047] env[61978]: DEBUG oslo_vmware.api [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Waiting for the task: (returnval){ [ 889.757047] env[61978]: value = "task-1394724" [ 889.757047] env[61978]: _type = "Task" [ 889.757047] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.774477] env[61978]: DEBUG nova.compute.provider_tree [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 889.779024] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': task-1394722, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.154594} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.779024] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 889.779024] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc2dc40-6cbc-479e-97d1-5b434c361c38 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.786508] env[61978]: DEBUG oslo_vmware.api [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': task-1394724, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.788849] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35eee72c-d93c-473f-99ba-82cdda6b3333 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.813845] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] 2f5b06f6-7178-4fdf-93b6-65477f020898/2f5b06f6-7178-4fdf-93b6-65477f020898.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 889.813845] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-530187e5-f326-40c0-a2f6-f29e077c2502 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.848943] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Updating instance 'b26a4784-698d-477a-8db7-58156899d231' progress to 0 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 889.862962] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Waiting for the task: (returnval){ [ 889.862962] env[61978]: value = "task-1394725" [ 889.862962] env[61978]: _type = "Task" [ 889.862962] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.867439] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522a5b1d-66dd-9d66-60d9-57653119a658, 'name': SearchDatastore_Task, 'duration_secs': 0.014487} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.868828] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-089a904b-7db7-4821-aa96-6c85498cb3c8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.877467] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': task-1394725, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.885026] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Waiting for the task: (returnval){ [ 889.885026] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]520be911-348c-4bae-0069-c355111e0bce" [ 889.885026] env[61978]: _type = "Task" [ 889.885026] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.893924] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520be911-348c-4bae-0069-c355111e0bce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.244971] env[61978]: DEBUG nova.compute.manager [req-1e3adcb9-ba3a-439d-9501-5cbef1bc43ea req-08f05a77-1192-4399-91c3-8ecad30fa3bf service nova] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Received event network-vif-plugged-2a7c8ac4-8bf9-436b-97e6-d95c0b1486ee {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 890.245385] env[61978]: DEBUG oslo_concurrency.lockutils [req-1e3adcb9-ba3a-439d-9501-5cbef1bc43ea req-08f05a77-1192-4399-91c3-8ecad30fa3bf service nova] Acquiring lock "a4d45835-f065-445f-bcb6-d1b01d545cb0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.245656] env[61978]: DEBUG oslo_concurrency.lockutils [req-1e3adcb9-ba3a-439d-9501-5cbef1bc43ea req-08f05a77-1192-4399-91c3-8ecad30fa3bf service nova] Lock "a4d45835-f065-445f-bcb6-d1b01d545cb0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.245721] env[61978]: DEBUG oslo_concurrency.lockutils [req-1e3adcb9-ba3a-439d-9501-5cbef1bc43ea req-08f05a77-1192-4399-91c3-8ecad30fa3bf service nova] Lock "a4d45835-f065-445f-bcb6-d1b01d545cb0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.245967] env[61978]: DEBUG nova.compute.manager [req-1e3adcb9-ba3a-439d-9501-5cbef1bc43ea req-08f05a77-1192-4399-91c3-8ecad30fa3bf service nova] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] No waiting events found dispatching network-vif-plugged-2a7c8ac4-8bf9-436b-97e6-d95c0b1486ee {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 890.247084] env[61978]: WARNING nova.compute.manager [req-1e3adcb9-ba3a-439d-9501-5cbef1bc43ea req-08f05a77-1192-4399-91c3-8ecad30fa3bf service nova] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Received unexpected event network-vif-plugged-2a7c8ac4-8bf9-436b-97e6-d95c0b1486ee for instance with vm_state building and task_state spawning. 
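Annotation: the task entries above (task-1394724 DeleteDatastoreFile_Task, task-1394725 ReconfigVM_Task, the session-scoped SearchDatastore_Task calls) all trace the same oslo.vmware pattern: a vSphere *_Task method is invoked through the API session, which returns a task reference immediately, and wait_for_task() then polls it, producing the "_poll_task ... progress is N%" and "completed successfully" lines. A minimal sketch of that generic pattern follows; it is not Nova's actual helper code, and the vCenter address, credentials and managed-object id are placeholders, not values from this environment.

    # Sketch of the invoke-then-wait pattern traced by the task-13947xx entries above.
    # Host, credentials and the 'vm-1234' moref are placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Look up the VM by its managed-object id and kick off an asynchronous task;
    # the *_Task call returns a task reference right away.
    vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')
    task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

    # wait_for_task() polls the task (the "progress is N%" lines above) and
    # returns once it reports success, raising if the task errors out.
    session.wait_for_task(task_ref)
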
[ 890.248555] env[61978]: DEBUG nova.compute.manager [req-1e3adcb9-ba3a-439d-9501-5cbef1bc43ea req-08f05a77-1192-4399-91c3-8ecad30fa3bf service nova] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Received event network-changed-2a7c8ac4-8bf9-436b-97e6-d95c0b1486ee {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 890.248555] env[61978]: DEBUG nova.compute.manager [req-1e3adcb9-ba3a-439d-9501-5cbef1bc43ea req-08f05a77-1192-4399-91c3-8ecad30fa3bf service nova] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Refreshing instance network info cache due to event network-changed-2a7c8ac4-8bf9-436b-97e6-d95c0b1486ee. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 890.248555] env[61978]: DEBUG oslo_concurrency.lockutils [req-1e3adcb9-ba3a-439d-9501-5cbef1bc43ea req-08f05a77-1192-4399-91c3-8ecad30fa3bf service nova] Acquiring lock "refresh_cache-a4d45835-f065-445f-bcb6-d1b01d545cb0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.248555] env[61978]: DEBUG oslo_concurrency.lockutils [req-1e3adcb9-ba3a-439d-9501-5cbef1bc43ea req-08f05a77-1192-4399-91c3-8ecad30fa3bf service nova] Acquired lock "refresh_cache-a4d45835-f065-445f-bcb6-d1b01d545cb0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.248555] env[61978]: DEBUG nova.network.neutron [req-1e3adcb9-ba3a-439d-9501-5cbef1bc43ea req-08f05a77-1192-4399-91c3-8ecad30fa3bf service nova] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Refreshing network info cache for port 2a7c8ac4-8bf9-436b-97e6-d95c0b1486ee {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 890.272698] env[61978]: DEBUG oslo_vmware.api [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Task: {'id': task-1394724, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157591} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.272938] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 890.273131] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 890.273307] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 890.273490] env[61978]: INFO nova.compute.manager [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Took 1.17 seconds to destroy the instance on the hypervisor. 
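Annotation: the recurring Acquiring lock / acquired / released lines (the "compute_resources" lock, the per-instance "-events" locks, the "refresh_cache-..." locks) come from oslo.concurrency's lockutils wrappers, which log how long each caller waited for and then held a named semaphore. A minimal sketch of the two usual usages, with an illustrative lock name and function rather than Nova's actual decorated methods:

    # Sketch of the oslo.concurrency locking behind the "Acquiring lock ..." /
    # "acquired ... waited" / "released ... held" DEBUG lines above.
    # The lock name and function are illustrative only.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim(instance_uuid):
        # Runs with the named semaphore held; the decorator's inner wrapper
        # emits the waited/held timings seen in the log.
        return instance_uuid

    # The same lock can also be taken explicitly as a context manager.
    with lockutils.lock('compute_resources'):
        pass
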
[ 890.273750] env[61978]: DEBUG oslo.service.loopingcall [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 890.273939] env[61978]: DEBUG nova.compute.manager [-] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 890.274069] env[61978]: DEBUG nova.network.neutron [-] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 890.276924] env[61978]: DEBUG nova.scheduler.client.report [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 890.361356] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 890.361356] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed5db613-6a62-4560-be8e-a08cbf54bf92 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.379112] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': task-1394725, 'name': ReconfigVM_Task, 'duration_secs': 0.294582} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.380572] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Reconfigured VM instance instance-00000013 to attach disk [datastore1] 2f5b06f6-7178-4fdf-93b6-65477f020898/2f5b06f6-7178-4fdf-93b6-65477f020898.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 890.382565] env[61978]: DEBUG oslo_vmware.api [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 890.382565] env[61978]: value = "task-1394726" [ 890.382565] env[61978]: _type = "Task" [ 890.382565] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.382565] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3bf98cb7-cdc7-47ac-ae49-dfad682b1375 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.396269] env[61978]: DEBUG oslo_vmware.api [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394726, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.403588] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Waiting for the task: (returnval){ [ 890.403588] env[61978]: value = "task-1394727" [ 890.403588] env[61978]: _type = "Task" [ 890.403588] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.404245] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520be911-348c-4bae-0069-c355111e0bce, 'name': SearchDatastore_Task, 'duration_secs': 0.022376} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.404688] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.405677] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] a4d45835-f065-445f-bcb6-d1b01d545cb0/a4d45835-f065-445f-bcb6-d1b01d545cb0.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 890.412076] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f22f2ec6-22e4-4131-9931-1fab35a7cbbd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.420682] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': task-1394727, 'name': Rename_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.422561] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Waiting for the task: (returnval){ [ 890.422561] env[61978]: value = "task-1394728" [ 890.422561] env[61978]: _type = "Task" [ 890.422561] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.432398] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': task-1394728, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.784779] env[61978]: DEBUG oslo_concurrency.lockutils [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.990s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.784779] env[61978]: DEBUG nova.compute.manager [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 890.789251] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.453s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.791647] env[61978]: INFO nova.compute.claims [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 890.896247] env[61978]: DEBUG oslo_vmware.api [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394726, 'name': PowerOffVM_Task, 'duration_secs': 0.322386} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.899497] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 890.900576] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Updating instance 'b26a4784-698d-477a-8db7-58156899d231' progress to 17 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 890.924425] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': task-1394727, 'name': Rename_Task, 'duration_secs': 0.323128} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.927444] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 890.928106] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea78cb38-ffb6-4000-8d6f-3e4c9d9ea787 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.942245] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': task-1394728, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.942463] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Waiting for the task: (returnval){ [ 890.942463] env[61978]: value = "task-1394729" [ 890.942463] env[61978]: _type = "Task" [ 890.942463] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.955029] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': task-1394729, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.290832] env[61978]: DEBUG nova.compute.utils [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 891.295311] env[61978]: DEBUG nova.compute.manager [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 891.295311] env[61978]: DEBUG nova.network.neutron [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 891.360601] env[61978]: DEBUG nova.network.neutron [req-1e3adcb9-ba3a-439d-9501-5cbef1bc43ea req-08f05a77-1192-4399-91c3-8ecad30fa3bf service nova] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Updated VIF entry in instance network info cache for port 2a7c8ac4-8bf9-436b-97e6-d95c0b1486ee. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 891.363210] env[61978]: DEBUG nova.network.neutron [req-1e3adcb9-ba3a-439d-9501-5cbef1bc43ea req-08f05a77-1192-4399-91c3-8ecad30fa3bf service nova] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Updating instance_info_cache with network_info: [{"id": "2a7c8ac4-8bf9-436b-97e6-d95c0b1486ee", "address": "fa:16:3e:15:3e:22", "network": {"id": "673a2a81-5be3-46b9-92e1-55c6b138a1f8", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1649618097-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82ec8701f6504322a0d00feb6c15f0be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c42bb08a-77b4-4bba-8166-702cbb1b5f1e", "external-id": "nsx-vlan-transportzone-137", "segmentation_id": 137, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a7c8ac4-8b", "ovs_interfaceid": "2a7c8ac4-8bf9-436b-97e6-d95c0b1486ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.395988] env[61978]: DEBUG nova.policy [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e9bddae208e94dbd8b1a6b188e7f651f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '198c0b3be81b48e08fbbcd5e637eecbc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 891.412974] env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:05Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 891.413237] env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} 
[ 891.413426] env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 891.413622] env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 891.413765] env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 891.413905] env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 891.414335] env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 891.414511] env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 891.414729] env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 891.414850] env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 891.415036] env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 891.422120] env[61978]: DEBUG nova.network.neutron [-] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.422414] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-baf7d76a-ffb8-4c01-ba03-6bd06b4376a9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.452934] env[61978]: DEBUG oslo_vmware.api [None 
req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 891.452934] env[61978]: value = "task-1394730" [ 891.452934] env[61978]: _type = "Task" [ 891.452934] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.453172] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': task-1394728, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.71622} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.454063] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] a4d45835-f065-445f-bcb6-d1b01d545cb0/a4d45835-f065-445f-bcb6-d1b01d545cb0.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 891.454220] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 891.457586] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-723bbceb-f7c3-4d6e-b00e-4d94caec438e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.463050] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': task-1394729, 'name': PowerOnVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.471600] env[61978]: DEBUG oslo_vmware.api [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394730, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.473212] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Waiting for the task: (returnval){ [ 891.473212] env[61978]: value = "task-1394731" [ 891.473212] env[61978]: _type = "Task" [ 891.473212] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.482465] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': task-1394731, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.805182] env[61978]: DEBUG nova.compute.manager [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 891.865109] env[61978]: DEBUG oslo_concurrency.lockutils [req-1e3adcb9-ba3a-439d-9501-5cbef1bc43ea req-08f05a77-1192-4399-91c3-8ecad30fa3bf service nova] Releasing lock "refresh_cache-a4d45835-f065-445f-bcb6-d1b01d545cb0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.937345] env[61978]: INFO nova.compute.manager [-] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Took 1.66 seconds to deallocate network for instance. [ 891.949370] env[61978]: DEBUG nova.network.neutron [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Successfully created port: a4813ec3-c879-4bdb-939f-21a96adfecf2 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 891.971158] env[61978]: DEBUG oslo_vmware.api [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': task-1394729, 'name': PowerOnVM_Task, 'duration_secs': 0.622808} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.971158] env[61978]: DEBUG oslo_vmware.api [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394730, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.971158] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 891.971158] env[61978]: INFO nova.compute.manager [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Took 11.44 seconds to spawn the instance on the hypervisor. 
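Annotation: the "Checking state" entry that follows the power-on (and the PropertyCollector.RetrievePropertiesEx calls around it) resolves to a property read of runtime.powerState through the session. A minimal sketch, assuming a session and VM reference obtained as in the earlier sketch:

    # Sketch of the property read behind "Checking state"; session and vm_ref are
    # assumed to come from an oslo.vmware VMwareAPISession as sketched earlier.
    from oslo_vmware import vim_util

    def get_power_state(session, vm_ref):
        # One PropertyCollector read, e.g. 'poweredOn' once PowerOnVM_Task completes.
        return session.invoke_api(
            vim_util, 'get_object_property',
            session.vim, vm_ref, 'runtime.powerState')
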
[ 891.971484] env[61978]: DEBUG nova.compute.manager [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 891.972452] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e0c707-717e-411c-9c7b-e9a3e475d1c6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.997174] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': task-1394731, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.196063} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.999966] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 892.002969] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b490432-5d28-4c4a-99b0-00447ffb8c9d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.034315] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] a4d45835-f065-445f-bcb6-d1b01d545cb0/a4d45835-f065-445f-bcb6-d1b01d545cb0.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 892.035618] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61f071cb-42b5-4371-b7eb-0be1cd944862 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.059889] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Waiting for the task: (returnval){ [ 892.059889] env[61978]: value = "task-1394732" [ 892.059889] env[61978]: _type = "Task" [ 892.059889] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.070951] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': task-1394732, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.361431] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd02f58a-4243-4c5d-8042-0f8840a21334 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.379303] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4189b2b2-b4a8-4c9d-b4cf-fd2edb89e02e tempest-ServersAdminNegativeTestJSON-1556791100 tempest-ServersAdminNegativeTestJSON-1556791100-project-admin] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Suspending the VM {{(pid=61978) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 892.379303] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-c07868ba-bd28-4332-9b1e-ecb5cc94decc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.387091] env[61978]: DEBUG oslo_vmware.api [None req-4189b2b2-b4a8-4c9d-b4cf-fd2edb89e02e tempest-ServersAdminNegativeTestJSON-1556791100 tempest-ServersAdminNegativeTestJSON-1556791100-project-admin] Waiting for the task: (returnval){ [ 892.387091] env[61978]: value = "task-1394733" [ 892.387091] env[61978]: _type = "Task" [ 892.387091] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.401117] env[61978]: DEBUG oslo_vmware.api [None req-4189b2b2-b4a8-4c9d-b4cf-fd2edb89e02e tempest-ServersAdminNegativeTestJSON-1556791100 tempest-ServersAdminNegativeTestJSON-1556791100-project-admin] Task: {'id': task-1394733, 'name': SuspendVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.451330] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.452487] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6b9f124-d379-4dd2-8524-17c699ca492c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.476463] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4141cb7f-3aa1-4104-8190-2eb792c89229 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.480287] env[61978]: DEBUG oslo_vmware.api [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394730, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.514457] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Acquiring lock "b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.514761] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Lock "b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.528025] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ee4fb73-430b-4eb5-bd9e-30946b659dfd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.531904] env[61978]: INFO nova.compute.manager [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Took 27.70 seconds to build instance. [ 892.547123] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1159fec7-d0ac-4234-84b1-c07429fc7491 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.576551] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': task-1394732, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.587822] env[61978]: DEBUG nova.compute.provider_tree [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.662082] env[61978]: DEBUG oslo_concurrency.lockutils [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquiring lock "a0762952-2afd-448a-8e46-ba788a4ca131" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.662396] env[61978]: DEBUG oslo_concurrency.lockutils [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "a0762952-2afd-448a-8e46-ba788a4ca131" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.662659] env[61978]: DEBUG oslo_concurrency.lockutils [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquiring lock "a0762952-2afd-448a-8e46-ba788a4ca131-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.662882] env[61978]: DEBUG oslo_concurrency.lockutils [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "a0762952-2afd-448a-8e46-ba788a4ca131-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.665958] env[61978]: DEBUG oslo_concurrency.lockutils [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "a0762952-2afd-448a-8e46-ba788a4ca131-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.665958] env[61978]: INFO nova.compute.manager [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Terminating instance [ 892.668911] env[61978]: DEBUG nova.compute.manager [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 892.669146] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 892.670887] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5edcf2e-8942-457b-aa42-72dcd3fb854a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.682438] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 892.683658] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-10aec337-2bcc-4571-97db-da86712b65ef {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.694027] env[61978]: DEBUG oslo_vmware.api [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for the task: (returnval){ [ 892.694027] env[61978]: value = "task-1394734" [ 892.694027] env[61978]: _type = "Task" [ 892.694027] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.702590] env[61978]: DEBUG oslo_vmware.api [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394734, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.823140] env[61978]: DEBUG nova.compute.manager [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 892.869160] env[61978]: DEBUG nova.virt.hardware [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 892.869288] env[61978]: DEBUG nova.virt.hardware [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 892.869451] env[61978]: DEBUG nova.virt.hardware [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 892.869632] env[61978]: DEBUG nova.virt.hardware [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 892.869778] env[61978]: DEBUG nova.virt.hardware [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 892.870407] env[61978]: DEBUG nova.virt.hardware [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 892.870688] env[61978]: DEBUG nova.virt.hardware [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 892.870920] env[61978]: DEBUG nova.virt.hardware [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 892.871043] env[61978]: DEBUG nova.virt.hardware [None 
req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 892.871208] env[61978]: DEBUG nova.virt.hardware [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 892.871409] env[61978]: DEBUG nova.virt.hardware [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 892.872430] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f22088-982e-42d8-aa3c-0072294cbfe8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.883245] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba26105e-fd8f-46dc-9b28-eeede33cb4a5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.903443] env[61978]: DEBUG oslo_vmware.api [None req-4189b2b2-b4a8-4c9d-b4cf-fd2edb89e02e tempest-ServersAdminNegativeTestJSON-1556791100 tempest-ServersAdminNegativeTestJSON-1556791100-project-admin] Task: {'id': task-1394733, 'name': SuspendVM_Task} progress is 37%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.968186] env[61978]: DEBUG oslo_vmware.api [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394730, 'name': ReconfigVM_Task, 'duration_secs': 1.394103} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.969865] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Updating instance 'b26a4784-698d-477a-8db7-58156899d231' progress to 33 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 893.034224] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2826be0e-f693-4eef-9c06-40e65f08f6a1 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Lock "2f5b06f6-7178-4fdf-93b6-65477f020898" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.100s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.078045] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': task-1394732, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.096504] env[61978]: DEBUG nova.scheduler.client.report [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 893.205487] env[61978]: DEBUG oslo_vmware.api [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394734, 'name': PowerOffVM_Task, 'duration_secs': 0.390504} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.205487] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 893.205487] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 893.205487] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7c09f2b3-5601-4ed6-b9b8-ddd682aadfc3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.401273] env[61978]: DEBUG oslo_vmware.api [None req-4189b2b2-b4a8-4c9d-b4cf-fd2edb89e02e tempest-ServersAdminNegativeTestJSON-1556791100 tempest-ServersAdminNegativeTestJSON-1556791100-project-admin] Task: {'id': task-1394733, 'name': SuspendVM_Task, 'duration_secs': 0.830629} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.401580] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4189b2b2-b4a8-4c9d-b4cf-fd2edb89e02e tempest-ServersAdminNegativeTestJSON-1556791100 tempest-ServersAdminNegativeTestJSON-1556791100-project-admin] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Suspended the VM {{(pid=61978) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 893.401851] env[61978]: DEBUG nova.compute.manager [None req-4189b2b2-b4a8-4c9d-b4cf-fd2edb89e02e tempest-ServersAdminNegativeTestJSON-1556791100 tempest-ServersAdminNegativeTestJSON-1556791100-project-admin] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 893.405250] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6810d23b-4229-4b14-90a4-64af383c4f53 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.477514] env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 893.479272] env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 893.479272] env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 893.479272] env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 893.479272] env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 893.479272] env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 893.479272] 
env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 893.480034] env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 893.480034] env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 893.480281] env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 893.480604] env[61978]: DEBUG nova.virt.hardware [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 893.487169] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Reconfiguring VM instance instance-00000008 to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 893.487839] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8bdf5a9-7642-47d2-8cdf-2011f4556007 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.513707] env[61978]: DEBUG oslo_vmware.api [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 893.513707] env[61978]: value = "task-1394736" [ 893.513707] env[61978]: _type = "Task" [ 893.513707] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.523807] env[61978]: DEBUG oslo_vmware.api [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394736, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.536502] env[61978]: DEBUG nova.compute.manager [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 893.575817] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': task-1394732, 'name': ReconfigVM_Task, 'duration_secs': 1.040376} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.576481] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Reconfigured VM instance instance-00000014 to attach disk [datastore1] a4d45835-f065-445f-bcb6-d1b01d545cb0/a4d45835-f065-445f-bcb6-d1b01d545cb0.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 893.577382] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-188fdcdf-1daf-4688-bf5b-7e5d3e463f2c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.587067] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Waiting for the task: (returnval){ [ 893.587067] env[61978]: value = "task-1394737" [ 893.587067] env[61978]: _type = "Task" [ 893.587067] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.599870] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': task-1394737, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.603084] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.814s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.603587] env[61978]: DEBUG nova.compute.manager [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 893.607258] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.574s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.607530] env[61978]: DEBUG nova.objects.instance [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Lazy-loading 'resources' on Instance uuid 2084a365-b662-4564-b899-ab4c4a63f2b9 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 893.852591] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 893.852865] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 893.852865] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Deleting the datastore file [datastore2] a0762952-2afd-448a-8e46-ba788a4ca131 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 893.853165] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00eda637-06b0-4ea7-a2c0-e1c2ae10b3f8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.863172] env[61978]: DEBUG oslo_vmware.api [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for the task: (returnval){ [ 893.863172] env[61978]: value = "task-1394738" [ 893.863172] env[61978]: _type = "Task" [ 893.863172] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.874278] env[61978]: DEBUG oslo_vmware.api [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394738, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.024512] env[61978]: DEBUG oslo_vmware.api [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394736, 'name': ReconfigVM_Task, 'duration_secs': 0.387302} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.024802] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Reconfigured VM instance instance-00000008 to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 894.025609] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee6062b-2e10-40f4-a4ea-93eb8f9a3e34 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.052775] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] b26a4784-698d-477a-8db7-58156899d231/b26a4784-698d-477a-8db7-58156899d231.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 894.055556] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41af81d2-4193-46a1-8f42-49f2f0374a21 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.077829] env[61978]: DEBUG oslo_vmware.api [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 894.077829] env[61978]: value = "task-1394739" [ 894.077829] env[61978]: _type = "Task" [ 894.077829] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.090503] env[61978]: DEBUG oslo_vmware.api [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394739, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.092278] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.102950] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': task-1394737, 'name': Rename_Task, 'duration_secs': 0.164508} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.103359] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 894.103691] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-18a1b9db-330a-4e3f-8aa2-84228ce86beb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.112786] env[61978]: DEBUG nova.compute.utils [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 894.115851] env[61978]: DEBUG nova.compute.manager [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 894.116074] env[61978]: DEBUG nova.network.neutron [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 894.120162] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Waiting for the task: (returnval){ [ 894.120162] env[61978]: value = "task-1394740" [ 894.120162] env[61978]: _type = "Task" [ 894.120162] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.129654] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': task-1394740, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.173944] env[61978]: DEBUG nova.policy [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8f50bac42274555ab08e047cdb028ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43ebac7c44604f55b94cbc06648f4908', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 894.334368] env[61978]: DEBUG oslo_concurrency.lockutils [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Acquiring lock "78b78ae7-74fe-4403-be9b-229abe6a7353" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.334594] env[61978]: DEBUG oslo_concurrency.lockutils [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Lock "78b78ae7-74fe-4403-be9b-229abe6a7353" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.374163] env[61978]: DEBUG oslo_vmware.api [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394738, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.33906} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.374576] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 894.374677] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 894.375360] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 894.375360] env[61978]: INFO nova.compute.manager [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Took 1.71 seconds to destroy the instance on the hypervisor. [ 894.375360] env[61978]: DEBUG oslo.service.loopingcall [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 894.375559] env[61978]: DEBUG nova.compute.manager [-] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 894.375559] env[61978]: DEBUG nova.network.neutron [-] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 894.558904] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-760487a3-4a23-4d45-bfc3-caf8d7e92712 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.567278] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b1d4e5-d8b2-4dd3-bf29-72c8a4a5e342 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.602128] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a80627-edd2-4959-8e69-2c31480d6c7d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.613441] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e091234e-bbb9-4e77-b6e3-747b18ad0b46 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.617871] env[61978]: DEBUG oslo_vmware.api [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394739, 'name': ReconfigVM_Task, 'duration_secs': 0.404179} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.618217] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Reconfigured VM instance instance-00000008 to attach disk [datastore1] b26a4784-698d-477a-8db7-58156899d231/b26a4784-698d-477a-8db7-58156899d231.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 894.618505] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Updating instance 'b26a4784-698d-477a-8db7-58156899d231' progress to 50 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 894.626543] env[61978]: DEBUG nova.compute.manager [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 894.638971] env[61978]: DEBUG nova.compute.provider_tree [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 894.647302] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': task-1394740, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.659362] env[61978]: DEBUG nova.network.neutron [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Successfully updated port: a4813ec3-c879-4bdb-939f-21a96adfecf2 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 894.667777] env[61978]: DEBUG nova.compute.manager [req-1c481fca-565f-4ed6-be99-20d6e3460518 req-1d57ca67-6c64-4e3e-a5f2-4b387b6b650a service nova] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Received event network-changed-51f0ce4a-1710-4256-9ca8-ac173927565b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 894.667974] env[61978]: DEBUG nova.compute.manager [req-1c481fca-565f-4ed6-be99-20d6e3460518 req-1d57ca67-6c64-4e3e-a5f2-4b387b6b650a service nova] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Refreshing instance network info cache due to event network-changed-51f0ce4a-1710-4256-9ca8-ac173927565b. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 894.668216] env[61978]: DEBUG oslo_concurrency.lockutils [req-1c481fca-565f-4ed6-be99-20d6e3460518 req-1d57ca67-6c64-4e3e-a5f2-4b387b6b650a service nova] Acquiring lock "refresh_cache-a0762952-2afd-448a-8e46-ba788a4ca131" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.668350] env[61978]: DEBUG oslo_concurrency.lockutils [req-1c481fca-565f-4ed6-be99-20d6e3460518 req-1d57ca67-6c64-4e3e-a5f2-4b387b6b650a service nova] Acquired lock "refresh_cache-a0762952-2afd-448a-8e46-ba788a4ca131" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.668503] env[61978]: DEBUG nova.network.neutron [req-1c481fca-565f-4ed6-be99-20d6e3460518 req-1d57ca67-6c64-4e3e-a5f2-4b387b6b650a service nova] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Refreshing network info cache for port 51f0ce4a-1710-4256-9ca8-ac173927565b {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 894.860696] env[61978]: DEBUG nova.network.neutron [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Successfully created port: f28bab30-b505-494a-97cf-e0d85ff19cda {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 895.137350] env[61978]: DEBUG oslo_vmware.api [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': task-1394740, 'name': PowerOnVM_Task, 'duration_secs': 0.593535} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.138117] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db6dccd3-2bbd-4a03-a70d-a686ec39a607 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.140662] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 895.140903] env[61978]: INFO nova.compute.manager [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Took 12.02 seconds to spawn the instance on the hypervisor. 
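The ReconfigVM_Task / PowerOnVM_Task entries above ("progress is N%", "completed successfully", duration_secs) come from a task-polling loop against vCenter. A minimal sketch of that pattern follows; it is not the oslo.vmware implementation, and read_task_info() is a hypothetical helper standing in for a PropertyCollector read of the task's info object.

    import time


    def wait_for_task(session, task_ref, read_task_info, interval=0.5):
        """Poll a vCenter task until it reaches a terminal state."""
        while True:
            info = read_task_info(session, task_ref)  # hypothetical helper
            if info.state == 'running':
                print(f"Task {task_ref}: progress is {info.progress}%")
            elif info.state == 'success':
                print(f"Task {task_ref} completed successfully "
                      f"(duration_secs={info.duration:.6f})")
                return info.result
            elif info.state == 'error':
                raise RuntimeError(f"Task {task_ref} failed: {info.error}")
            time.sleep(interval)

The log's progress percentages and duration_secs values are what such a loop reports on each poll and on completion.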
[ 895.141053] env[61978]: DEBUG nova.compute.manager [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 895.144819] env[61978]: DEBUG nova.scheduler.client.report [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 895.148931] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ed2ab0-2b40-47b4-93b7-914346313a8b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.172280] env[61978]: DEBUG oslo_concurrency.lockutils [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Acquiring lock "refresh_cache-3ddf7322-5504-408f-af6c-af73fb1c4286" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.172280] env[61978]: DEBUG oslo_concurrency.lockutils [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Acquired lock "refresh_cache-3ddf7322-5504-408f-af6c-af73fb1c4286" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.172478] env[61978]: DEBUG nova.network.neutron [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 895.177069] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bdc6707-3620-43f7-a161-2b066423c266 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.197877] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Updating instance 'b26a4784-698d-477a-8db7-58156899d231' progress to 67 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 895.208689] env[61978]: INFO nova.network.neutron [req-1c481fca-565f-4ed6-be99-20d6e3460518 req-1d57ca67-6c64-4e3e-a5f2-4b387b6b650a service nova] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Port 51f0ce4a-1710-4256-9ca8-ac173927565b from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
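The cache-refresh entries around here show a port being dropped from the instance's network info cache once Neutron no longer associates it with the instance, leaving an empty cache. A hedged, self-contained illustration of that pruning step (names are illustrative, not Nova's actual signatures):

    def prune_network_info_cache(cached_vifs, live_port_ids):
        """Keep only cached VIF entries whose port still exists in Neutron."""
        kept = [vif for vif in cached_vifs if vif['id'] in live_port_ids]
        removed = [vif['id'] for vif in cached_vifs if vif['id'] not in live_port_ids]
        for port_id in removed:
            print(f"Port {port_id} no longer associated with instance; "
                  "removing from network info_cache.")
        return kept


    # For instance a0762952-...: the single cached port was deleted in Neutron,
    # so the refreshed cache is the empty list seen in the next log entry.
    assert prune_network_info_cache(
        [{'id': '51f0ce4a-1710-4256-9ca8-ac173927565b'}], set()) == []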
[ 895.208915] env[61978]: DEBUG nova.network.neutron [req-1c481fca-565f-4ed6-be99-20d6e3460518 req-1d57ca67-6c64-4e3e-a5f2-4b387b6b650a service nova] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.508496] env[61978]: DEBUG nova.network.neutron [-] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.652472] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.045s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.656946] env[61978]: DEBUG nova.compute.manager [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 896.377028] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.582s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.378292] env[61978]: INFO nova.compute.claims [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 896.389260] env[61978]: DEBUG oslo_concurrency.lockutils [req-1c481fca-565f-4ed6-be99-20d6e3460518 req-1d57ca67-6c64-4e3e-a5f2-4b387b6b650a service nova] Releasing lock "refresh_cache-a0762952-2afd-448a-8e46-ba788a4ca131" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.389662] env[61978]: DEBUG nova.compute.manager [req-1c481fca-565f-4ed6-be99-20d6e3460518 req-1d57ca67-6c64-4e3e-a5f2-4b387b6b650a service nova] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Received event network-vif-deleted-c8d18564-3f86-41ec-88ac-735b63415259 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 896.390191] env[61978]: INFO nova.compute.manager [-] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Took 2.01 seconds to deallocate network for instance. 
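The lockutils entries throughout this section ("acquired ... waited 22.582s", "released ... held 2.045s") record how long a caller waited for a named lock and how long it held it. A rough sketch of that wait/held timing, using oslo.concurrency's real lock() context manager; the timing wrapper itself is only illustrative, not Nova's code:

    import time
    from contextlib import contextmanager

    from oslo_concurrency import lockutils


    @contextmanager
    def timed_lock(name):
        requested = time.monotonic()
        with lockutils.lock(name):
            acquired = time.monotonic()
            print(f'Lock "{name}" acquired :: waited {acquired - requested:.3f}s')
            try:
                yield
            finally:
                held = time.monotonic() - acquired
                print(f'Lock "{name}" released :: held {held:.3f}s')


    # e.g. the resource tracker serializes claims on a single per-host lock:
    with timed_lock("compute_resources"):
        pass  # instance_claim / update_usage work would happen here

Because every claim and usage update on the host contends for the same "compute_resources" lock, long held times show up as long waited times for the next request, which is what the consecutive entries above reflect.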
[ 896.395218] env[61978]: INFO nova.scheduler.client.report [None req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Deleted allocations for instance 2084a365-b662-4564-b899-ab4c4a63f2b9 [ 896.398382] env[61978]: INFO nova.compute.manager [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Took 30.89 seconds to build instance. [ 896.406884] env[61978]: DEBUG nova.virt.hardware [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 896.407213] env[61978]: DEBUG nova.virt.hardware [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 896.407297] env[61978]: DEBUG nova.virt.hardware [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 896.407455] env[61978]: DEBUG nova.virt.hardware [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 896.407599] env[61978]: DEBUG nova.virt.hardware [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 896.407742] env[61978]: DEBUG nova.virt.hardware [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 896.407968] env[61978]: DEBUG nova.virt.hardware [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 896.408202] env[61978]: DEBUG nova.virt.hardware [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 896.408358] env[61978]: DEBUG nova.virt.hardware [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 896.408534] env[61978]: DEBUG nova.virt.hardware [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 896.409096] env[61978]: DEBUG nova.virt.hardware [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 896.409631] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2891f3-ee9d-4d30-9eb1-08fec1f999ac {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.420015] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-313502e5-c8d5-4b6f-9efd-aa175270c5ad {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.457905] env[61978]: DEBUG nova.network.neutron [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 896.490250] env[61978]: DEBUG nova.network.neutron [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Port 3a886f4f-5f7c-4f97-8f00-2555aebe9856 binding to destination host cpu-1 is already ACTIVE {{(pid=61978) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 896.605809] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.605918] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.767456] env[61978]: DEBUG nova.network.neutron [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Updating instance_info_cache with network_info: [{"id": "a4813ec3-c879-4bdb-939f-21a96adfecf2", "address": "fa:16:3e:b3:dc:a9", "network": {"id": "15c14192-e89d-4440-b84f-a102214859df", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-878575290-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "198c0b3be81b48e08fbbcd5e637eecbc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c92f34c-1dd7-4dc5-b8e8-f6c55cc5b4b8", "external-id": "nsx-vlan-transportzone-850", "segmentation_id": 850, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4813ec3-c8", "ovs_interfaceid": "a4813ec3-c879-4bdb-939f-21a96adfecf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.905683] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6dfb079d-8a19-4f2c-a4ec-aae2e6c0a3b6 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Lock "a4d45835-f065-445f-bcb6-d1b01d545cb0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.610s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.906284] env[61978]: DEBUG oslo_concurrency.lockutils [None 
req-bd8f3fac-b9d9-47e3-a78b-65bf0e268b59 tempest-ServerDiagnosticsNegativeTest-1645519855 tempest-ServerDiagnosticsNegativeTest-1645519855-project-member] Lock "2084a365-b662-4564-b899-ab4c4a63f2b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.866s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.938805] env[61978]: DEBUG oslo_concurrency.lockutils [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.002050] env[61978]: DEBUG nova.network.neutron [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Successfully updated port: f28bab30-b505-494a-97cf-e0d85ff19cda {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 897.104253] env[61978]: DEBUG nova.compute.manager [req-51c37dad-0528-4f08-918a-1bafec0db05c req-3ed91364-4b6f-4701-b7bd-8780bafd249d service nova] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Received event network-vif-deleted-51f0ce4a-1710-4256-9ca8-ac173927565b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 897.272892] env[61978]: DEBUG oslo_concurrency.lockutils [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Releasing lock "refresh_cache-3ddf7322-5504-408f-af6c-af73fb1c4286" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.272892] env[61978]: DEBUG nova.compute.manager [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Instance network_info: |[{"id": "a4813ec3-c879-4bdb-939f-21a96adfecf2", "address": "fa:16:3e:b3:dc:a9", "network": {"id": "15c14192-e89d-4440-b84f-a102214859df", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-878575290-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "198c0b3be81b48e08fbbcd5e637eecbc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c92f34c-1dd7-4dc5-b8e8-f6c55cc5b4b8", "external-id": "nsx-vlan-transportzone-850", "segmentation_id": 850, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4813ec3-c8", "ovs_interfaceid": "a4813ec3-c879-4bdb-939f-21a96adfecf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 897.273131] env[61978]: DEBUG nova.virt.vmwareapi.vmops 
[None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:dc:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3c92f34c-1dd7-4dc5-b8e8-f6c55cc5b4b8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4813ec3-c879-4bdb-939f-21a96adfecf2', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 897.279525] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Creating folder: Project (198c0b3be81b48e08fbbcd5e637eecbc). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 897.280321] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fd1d99b3-2522-466d-b1a7-e4a819cafeba {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.294278] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Created folder: Project (198c0b3be81b48e08fbbcd5e637eecbc) in parent group-v295764. [ 897.294811] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Creating folder: Instances. Parent ref: group-v295825. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 897.295342] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-099a8153-084f-4449-afd8-5c9ae7c10d84 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.309854] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Created folder: Instances in parent group-v295825. [ 897.309854] env[61978]: DEBUG oslo.service.loopingcall [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 897.309854] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 897.309854] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6501d04d-7c4d-46ea-8d0b-9fe8333c3322 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.338290] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 897.338290] env[61978]: value = "task-1394743" [ 897.338290] env[61978]: _type = "Task" [ 897.338290] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.350520] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394743, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.410041] env[61978]: DEBUG nova.compute.manager [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 897.515660] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "refresh_cache-76dff032-a806-4910-a48b-8850b05131c1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.515814] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "refresh_cache-76dff032-a806-4910-a48b-8850b05131c1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.516057] env[61978]: DEBUG nova.network.neutron [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 897.534365] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "b26a4784-698d-477a-8db7-58156899d231-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.534604] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "b26a4784-698d-477a-8db7-58156899d231-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.534787] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "b26a4784-698d-477a-8db7-58156899d231-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.570877] env[61978]: DEBUG nova.network.neutron [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 897.608453] env[61978]: DEBUG nova.compute.manager [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Received event network-changed-ebb15ef6-0310-4f67-8247-f09f03d452db {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 897.608669] env[61978]: DEBUG nova.compute.manager [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Refreshing instance network info cache due to event network-changed-ebb15ef6-0310-4f67-8247-f09f03d452db. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 897.608861] env[61978]: DEBUG oslo_concurrency.lockutils [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] Acquiring lock "refresh_cache-96bef3f3-a45c-43ba-a86a-66c1d5686ea6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.609047] env[61978]: DEBUG oslo_concurrency.lockutils [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] Acquired lock "refresh_cache-96bef3f3-a45c-43ba-a86a-66c1d5686ea6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.609200] env[61978]: DEBUG nova.network.neutron [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Refreshing network info cache for port ebb15ef6-0310-4f67-8247-f09f03d452db {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 897.759957] env[61978]: DEBUG nova.network.neutron [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Updating instance_info_cache with network_info: [{"id": "f28bab30-b505-494a-97cf-e0d85ff19cda", "address": "fa:16:3e:46:17:c4", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf28bab30-b5", "ovs_interfaceid": "f28bab30-b505-494a-97cf-e0d85ff19cda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.850763] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394743, 'name': CreateVM_Task, 'duration_secs': 0.394505} completed 
successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.851066] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 897.852137] env[61978]: DEBUG oslo_concurrency.lockutils [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.852367] env[61978]: DEBUG oslo_concurrency.lockutils [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.852709] env[61978]: DEBUG oslo_concurrency.lockutils [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 897.853027] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0eaa8af-9e65-4bf0-baf7-f1cc6e9962c3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.861587] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Waiting for the task: (returnval){ [ 897.861587] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52122951-357b-5314-05f6-5a3bee5cb1ea" [ 897.861587] env[61978]: _type = "Task" [ 897.861587] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.871105] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52122951-357b-5314-05f6-5a3bee5cb1ea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.897976] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58db9181-6cd4-42c6-8169-4cc03f889a66 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.907037] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d5021c-c812-4648-a1e0-e3d6324ea56a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.940877] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae84fbd-63f2-486b-a59d-e030bb052353 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.951358] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa4ab84-41f0-42f2-b173-b1bc62af91eb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.956631] env[61978]: DEBUG oslo_concurrency.lockutils [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.967044] env[61978]: DEBUG nova.compute.provider_tree [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 898.263213] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "refresh_cache-76dff032-a806-4910-a48b-8850b05131c1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.263554] env[61978]: DEBUG nova.compute.manager [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Instance network_info: |[{"id": "f28bab30-b505-494a-97cf-e0d85ff19cda", "address": "fa:16:3e:46:17:c4", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tapf28bab30-b5", "ovs_interfaceid": "f28bab30-b505-494a-97cf-e0d85ff19cda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 898.263972] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:17:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ed3ffc1d-9f86-4029-857e-6cd1d383edbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f28bab30-b505-494a-97cf-e0d85ff19cda', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 898.271599] env[61978]: DEBUG oslo.service.loopingcall [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 898.271824] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 898.272055] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e8facc89-5eb2-489b-91e5-b1d8253f0f67 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.295959] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 898.295959] env[61978]: value = "task-1394744" [ 898.295959] env[61978]: _type = "Task" [ 898.295959] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.304275] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394744, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.373622] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52122951-357b-5314-05f6-5a3bee5cb1ea, 'name': SearchDatastore_Task, 'duration_secs': 0.01149} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.373941] env[61978]: DEBUG oslo_concurrency.lockutils [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.374384] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 898.374662] env[61978]: DEBUG oslo_concurrency.lockutils [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.374816] env[61978]: DEBUG oslo_concurrency.lockutils [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.375226] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 898.375347] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-92607aeb-3835-4ebe-9fbc-e8691ce40f31 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.386064] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 898.386269] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 898.387525] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f4c39ac-e102-4b02-9912-f646885a49f6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.391547] env[61978]: DEBUG nova.network.neutron [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Updated VIF entry in instance network info cache for port ebb15ef6-0310-4f67-8247-f09f03d452db. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 898.391950] env[61978]: DEBUG nova.network.neutron [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Updating instance_info_cache with network_info: [{"id": "ebb15ef6-0310-4f67-8247-f09f03d452db", "address": "fa:16:3e:e2:a7:be", "network": {"id": "140282e9-127a-4f19-b6d1-6bea55474c67", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-904021562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a3471435d4747648cd8ddf0817d9b85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebb15ef6-03", "ovs_interfaceid": "ebb15ef6-0310-4f67-8247-f09f03d452db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.397018] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Waiting for the task: (returnval){ [ 898.397018] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5245dbf1-0b73-26fb-ac73-986c4f841331" [ 898.397018] env[61978]: _type = "Task" [ 898.397018] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.405540] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5245dbf1-0b73-26fb-ac73-986c4f841331, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.473188] env[61978]: DEBUG nova.scheduler.client.report [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 898.594506] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "refresh_cache-b26a4784-698d-477a-8db7-58156899d231" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.594950] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquired lock "refresh_cache-b26a4784-698d-477a-8db7-58156899d231" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.595290] env[61978]: DEBUG nova.network.neutron [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 898.810497] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394744, 'name': CreateVM_Task, 'duration_secs': 0.399036} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.810703] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 898.811402] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.811611] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.812010] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 898.812311] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a3958e7-0235-4cde-bca2-8f6001339bfb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.818758] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 898.818758] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52631dd2-93cf-0b95-2a21-499be03fc826" [ 898.818758] env[61978]: _type = "Task" [ 898.818758] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.830270] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52631dd2-93cf-0b95-2a21-499be03fc826, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.897048] env[61978]: DEBUG oslo_concurrency.lockutils [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] Releasing lock "refresh_cache-96bef3f3-a45c-43ba-a86a-66c1d5686ea6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.897381] env[61978]: DEBUG nova.compute.manager [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Received event network-vif-plugged-a4813ec3-c879-4bdb-939f-21a96adfecf2 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 898.897580] env[61978]: DEBUG oslo_concurrency.lockutils [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] Acquiring lock "3ddf7322-5504-408f-af6c-af73fb1c4286-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 898.897832] env[61978]: DEBUG oslo_concurrency.lockutils [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] Lock "3ddf7322-5504-408f-af6c-af73fb1c4286-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.898009] env[61978]: DEBUG oslo_concurrency.lockutils [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] Lock "3ddf7322-5504-408f-af6c-af73fb1c4286-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.898231] env[61978]: DEBUG nova.compute.manager [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] No waiting events found dispatching network-vif-plugged-a4813ec3-c879-4bdb-939f-21a96adfecf2 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 898.898408] env[61978]: WARNING nova.compute.manager [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Received unexpected event network-vif-plugged-a4813ec3-c879-4bdb-939f-21a96adfecf2 for instance with vm_state building and task_state spawning. [ 898.898568] env[61978]: DEBUG nova.compute.manager [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Received event network-changed-a4813ec3-c879-4bdb-939f-21a96adfecf2 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 898.898719] env[61978]: DEBUG nova.compute.manager [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Refreshing instance network info cache due to event network-changed-a4813ec3-c879-4bdb-939f-21a96adfecf2. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 898.898906] env[61978]: DEBUG oslo_concurrency.lockutils [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] Acquiring lock "refresh_cache-3ddf7322-5504-408f-af6c-af73fb1c4286" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.899050] env[61978]: DEBUG oslo_concurrency.lockutils [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] Acquired lock "refresh_cache-3ddf7322-5504-408f-af6c-af73fb1c4286" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.899207] env[61978]: DEBUG nova.network.neutron [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Refreshing network info cache for port a4813ec3-c879-4bdb-939f-21a96adfecf2 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 898.911993] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5245dbf1-0b73-26fb-ac73-986c4f841331, 'name': SearchDatastore_Task, 'duration_secs': 0.012272} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.913678] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8de1db0f-93ba-439e-ab0e-df9a81b90946 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.921161] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Waiting for the task: (returnval){ [ 898.921161] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52fd7a33-84f3-1b78-0ca9-33a1c250044e" [ 898.921161] env[61978]: _type = "Task" [ 898.921161] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.932638] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52fd7a33-84f3-1b78-0ca9-33a1c250044e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.979439] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.603s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.980016] env[61978]: DEBUG nova.compute.manager [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 898.983128] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.143s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.984762] env[61978]: INFO nova.compute.claims [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 899.267671] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "f22e097d-f1a5-414a-82cc-ab455db876c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.268642] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "f22e097d-f1a5-414a-82cc-ab455db876c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.333014] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52631dd2-93cf-0b95-2a21-499be03fc826, 'name': SearchDatastore_Task, 'duration_secs': 0.012246} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.333551] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.334430] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 899.335334] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.432109] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52fd7a33-84f3-1b78-0ca9-33a1c250044e, 'name': SearchDatastore_Task, 'duration_secs': 0.012957} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.432371] env[61978]: DEBUG oslo_concurrency.lockutils [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.433473] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 3ddf7322-5504-408f-af6c-af73fb1c4286/3ddf7322-5504-408f-af6c-af73fb1c4286.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 899.433473] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.433473] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 899.433473] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-94de237e-b2a2-41b3-8bbb-bcfeb58d5133 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.435968] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d71f9d9a-4cf9-4c62-900b-dd6c18f3fef4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.450880] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 899.452083] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 899.453012] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Waiting for the task: (returnval){ [ 899.453012] env[61978]: value = "task-1394745" [ 899.453012] env[61978]: _type = "Task" [ 899.453012] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.456236] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0da562e8-2188-4097-8b15-a7ce198c1df0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.468506] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 899.468506] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]526a0fa1-9f09-2dcd-1c76-1f4b9ec0018a" [ 899.468506] env[61978]: _type = "Task" [ 899.468506] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.472073] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': task-1394745, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.481882] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]526a0fa1-9f09-2dcd-1c76-1f4b9ec0018a, 'name': SearchDatastore_Task, 'duration_secs': 0.010411} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.486834] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f48163df-4b1e-4b86-80de-5b0552387503 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.491903] env[61978]: DEBUG nova.compute.utils [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 899.495052] env[61978]: DEBUG nova.compute.manager [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 899.495327] env[61978]: DEBUG nova.network.neutron [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 899.502612] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 899.502612] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]529cf181-2618-472c-ae07-b8b12cafad9e" [ 899.502612] env[61978]: _type = "Task" [ 899.502612] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.514738] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]529cf181-2618-472c-ae07-b8b12cafad9e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.596745] env[61978]: DEBUG nova.network.neutron [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Updating instance_info_cache with network_info: [{"id": "3a886f4f-5f7c-4f97-8f00-2555aebe9856", "address": "fa:16:3e:94:aa:92", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a886f4f-5f", "ovs_interfaceid": "3a886f4f-5f7c-4f97-8f00-2555aebe9856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.808186] env[61978]: DEBUG nova.policy [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e607dfc944154c1faed12da382640f0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6d7394d965f94155a34dd0ecc0957649', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 899.970849] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': task-1394745, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.998583] env[61978]: DEBUG nova.compute.manager [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 900.001145] env[61978]: DEBUG nova.network.neutron [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Updated VIF entry in instance network info cache for port a4813ec3-c879-4bdb-939f-21a96adfecf2. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 900.002487] env[61978]: DEBUG nova.network.neutron [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Updating instance_info_cache with network_info: [{"id": "a4813ec3-c879-4bdb-939f-21a96adfecf2", "address": "fa:16:3e:b3:dc:a9", "network": {"id": "15c14192-e89d-4440-b84f-a102214859df", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-878575290-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "198c0b3be81b48e08fbbcd5e637eecbc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c92f34c-1dd7-4dc5-b8e8-f6c55cc5b4b8", "external-id": "nsx-vlan-transportzone-850", "segmentation_id": 850, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4813ec3-c8", "ovs_interfaceid": "a4813ec3-c879-4bdb-939f-21a96adfecf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.019181] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]529cf181-2618-472c-ae07-b8b12cafad9e, 'name': SearchDatastore_Task, 'duration_secs': 0.013532} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.019457] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.019717] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 76dff032-a806-4910-a48b-8850b05131c1/76dff032-a806-4910-a48b-8850b05131c1.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 900.019982] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4270bc32-4732-4a87-8983-094c0a567088 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.030115] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 900.030115] env[61978]: value = "task-1394746" [ 900.030115] env[61978]: _type = "Task" [ 900.030115] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.044394] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394746, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.099825] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Releasing lock "refresh_cache-b26a4784-698d-477a-8db7-58156899d231" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.478175] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': task-1394745, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.701512} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.478779] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 3ddf7322-5504-408f-af6c-af73fb1c4286/3ddf7322-5504-408f-af6c-af73fb1c4286.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 900.479936] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 900.480259] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d66cac41-3853-4364-98aa-f04b0eaca844 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.489555] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Waiting for the task: (returnval){ [ 900.489555] env[61978]: value = "task-1394747" [ 900.489555] env[61978]: _type = "Task" [ 900.489555] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.503435] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': task-1394747, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.513405] env[61978]: DEBUG oslo_concurrency.lockutils [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] Releasing lock "refresh_cache-3ddf7322-5504-408f-af6c-af73fb1c4286" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.513576] env[61978]: DEBUG nova.compute.manager [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Received event network-changed-a5290cfd-6d88-4c49-a54c-626d4c4843bd {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 900.513904] env[61978]: DEBUG nova.compute.manager [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Refreshing instance network info cache due to event network-changed-a5290cfd-6d88-4c49-a54c-626d4c4843bd. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 900.513967] env[61978]: DEBUG oslo_concurrency.lockutils [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] Acquiring lock "refresh_cache-9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.514149] env[61978]: DEBUG oslo_concurrency.lockutils [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] Acquired lock "refresh_cache-9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.514318] env[61978]: DEBUG nova.network.neutron [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Refreshing network info cache for port a5290cfd-6d88-4c49-a54c-626d4c4843bd {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 900.541365] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394746, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.546089] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-888ac471-dc3b-4c50-9a6a-019c05783fef {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.554427] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e27ec6b7-7a5a-49be-a361-e64ef40ae7c2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.589538] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb19af7-afb5-41fa-ab2c-ef650002c23c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.598889] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-126c357c-5909-475e-9864-30f6307b25ce {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.617092] env[61978]: DEBUG nova.compute.provider_tree [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.632445] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c2fce9-feb9-4462-b8e1-788f8031e48c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.652413] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44445719-30fc-4829-919e-ff730d5e540b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.660249] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None 
req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Updating instance 'b26a4784-698d-477a-8db7-58156899d231' progress to 83 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 900.855027] env[61978]: DEBUG nova.network.neutron [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Successfully created port: 66a47730-43bc-4ac1-b494-0ec1041be9d2 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 901.000660] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': task-1394747, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.262527} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.001245] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 901.002107] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c322922f-1f92-4879-ac6a-7fdc3233705e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.017761] env[61978]: DEBUG nova.compute.manager [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 901.032033] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] 3ddf7322-5504-408f-af6c-af73fb1c4286/3ddf7322-5504-408f-af6c-af73fb1c4286.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 901.032186] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac3e61e8-7386-4691-b4d6-d22fa7fd4cc4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.067669] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394746, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530066} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.069225] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 76dff032-a806-4910-a48b-8850b05131c1/76dff032-a806-4910-a48b-8850b05131c1.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 901.069465] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 901.069781] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Waiting for the task: (returnval){ [ 901.069781] env[61978]: value = "task-1394748" [ 901.069781] env[61978]: _type = "Task" [ 901.069781] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.072209] env[61978]: DEBUG nova.virt.hardware [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 901.072446] env[61978]: DEBUG nova.virt.hardware [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 901.072569] env[61978]: DEBUG nova.virt.hardware [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 901.072744] env[61978]: DEBUG nova.virt.hardware [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 901.072888] env[61978]: DEBUG nova.virt.hardware [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 
tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 901.073093] env[61978]: DEBUG nova.virt.hardware [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 901.073275] env[61978]: DEBUG nova.virt.hardware [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 901.073905] env[61978]: DEBUG nova.virt.hardware [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 901.073905] env[61978]: DEBUG nova.virt.hardware [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 901.073905] env[61978]: DEBUG nova.virt.hardware [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 901.073905] env[61978]: DEBUG nova.virt.hardware [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 901.074409] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-886ad826-1685-43fe-8901-9b3dbfe80467 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.077234] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f29cbfe-76b8-46e0-a19d-5ede6a5b3407 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.094730] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e2af25-4dc4-4587-b088-f3ffc9a07998 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.100973] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 901.100973] env[61978]: value = "task-1394749" [ 901.100973] env[61978]: _type = "Task" [ 901.100973] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.101229] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': task-1394748, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.122895] env[61978]: DEBUG nova.scheduler.client.report [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 901.126165] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394749, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.168756] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 901.168756] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3345c02-d596-4a14-984b-dc836548b524 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.177718] env[61978]: DEBUG oslo_vmware.api [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 901.177718] env[61978]: value = "task-1394750" [ 901.177718] env[61978]: _type = "Task" [ 901.177718] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.188360] env[61978]: DEBUG oslo_vmware.api [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394750, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.565376] env[61978]: DEBUG nova.network.neutron [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Updated VIF entry in instance network info cache for port a5290cfd-6d88-4c49-a54c-626d4c4843bd. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 901.565376] env[61978]: DEBUG nova.network.neutron [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Updating instance_info_cache with network_info: [{"id": "a5290cfd-6d88-4c49-a54c-626d4c4843bd", "address": "fa:16:3e:ed:43:9e", "network": {"id": "4b7af29e-cc4a-4765-8d05-6adf88573ad3", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1006801081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00c674bbf1e945ba946d844f9856fdfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5290cfd-6d", "ovs_interfaceid": "a5290cfd-6d88-4c49-a54c-626d4c4843bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.591041] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': task-1394748, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.614746] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394749, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.30088} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.616662] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 901.618624] env[61978]: DEBUG nova.compute.manager [req-7dbe42ad-dead-4ed7-8e22-faa2d7dbee39 req-17ffb744-39e1-44f9-a291-d3476d4617b4 service nova] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Received event network-vif-plugged-f28bab30-b505-494a-97cf-e0d85ff19cda {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 901.618880] env[61978]: DEBUG oslo_concurrency.lockutils [req-7dbe42ad-dead-4ed7-8e22-faa2d7dbee39 req-17ffb744-39e1-44f9-a291-d3476d4617b4 service nova] Acquiring lock "76dff032-a806-4910-a48b-8850b05131c1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.619160] env[61978]: DEBUG oslo_concurrency.lockutils [req-7dbe42ad-dead-4ed7-8e22-faa2d7dbee39 req-17ffb744-39e1-44f9-a291-d3476d4617b4 service nova] Lock "76dff032-a806-4910-a48b-8850b05131c1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.619385] env[61978]: DEBUG oslo_concurrency.lockutils [req-7dbe42ad-dead-4ed7-8e22-faa2d7dbee39 req-17ffb744-39e1-44f9-a291-d3476d4617b4 service nova] Lock "76dff032-a806-4910-a48b-8850b05131c1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.619783] env[61978]: DEBUG nova.compute.manager [req-7dbe42ad-dead-4ed7-8e22-faa2d7dbee39 req-17ffb744-39e1-44f9-a291-d3476d4617b4 service nova] [instance: 76dff032-a806-4910-a48b-8850b05131c1] No waiting events found dispatching network-vif-plugged-f28bab30-b505-494a-97cf-e0d85ff19cda {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 901.620302] env[61978]: WARNING nova.compute.manager [req-7dbe42ad-dead-4ed7-8e22-faa2d7dbee39 req-17ffb744-39e1-44f9-a291-d3476d4617b4 service nova] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Received unexpected event network-vif-plugged-f28bab30-b505-494a-97cf-e0d85ff19cda for instance with vm_state building and task_state spawning. [ 901.620302] env[61978]: DEBUG nova.compute.manager [req-7dbe42ad-dead-4ed7-8e22-faa2d7dbee39 req-17ffb744-39e1-44f9-a291-d3476d4617b4 service nova] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Received event network-changed-f28bab30-b505-494a-97cf-e0d85ff19cda {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 901.620302] env[61978]: DEBUG nova.compute.manager [req-7dbe42ad-dead-4ed7-8e22-faa2d7dbee39 req-17ffb744-39e1-44f9-a291-d3476d4617b4 service nova] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Refreshing instance network info cache due to event network-changed-f28bab30-b505-494a-97cf-e0d85ff19cda. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 901.620619] env[61978]: DEBUG oslo_concurrency.lockutils [req-7dbe42ad-dead-4ed7-8e22-faa2d7dbee39 req-17ffb744-39e1-44f9-a291-d3476d4617b4 service nova] Acquiring lock "refresh_cache-76dff032-a806-4910-a48b-8850b05131c1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.620810] env[61978]: DEBUG oslo_concurrency.lockutils [req-7dbe42ad-dead-4ed7-8e22-faa2d7dbee39 req-17ffb744-39e1-44f9-a291-d3476d4617b4 service nova] Acquired lock "refresh_cache-76dff032-a806-4910-a48b-8850b05131c1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.621033] env[61978]: DEBUG nova.network.neutron [req-7dbe42ad-dead-4ed7-8e22-faa2d7dbee39 req-17ffb744-39e1-44f9-a291-d3476d4617b4 service nova] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Refreshing network info cache for port f28bab30-b505-494a-97cf-e0d85ff19cda {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 901.625070] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4474c1de-3987-4905-b07f-9b61e24615ad {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.637622] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.654s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.643024] env[61978]: DEBUG nova.compute.manager [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 901.644896] env[61978]: DEBUG oslo_concurrency.lockutils [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.584s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.645203] env[61978]: DEBUG nova.objects.instance [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61978) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 901.673115] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] 76dff032-a806-4910-a48b-8850b05131c1/76dff032-a806-4910-a48b-8850b05131c1.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 901.673782] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa7d265e-21eb-44ae-a137-0b2bb4d5a766 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.693340] env[61978]: DEBUG nova.compute.manager [req-fef6e9b7-c7bb-4996-a92d-18e90667d61e req-a89779c6-0cbc-44ee-af6f-3dca7d09d9fd service nova] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Received event network-changed-51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 901.693552] env[61978]: DEBUG nova.compute.manager [req-fef6e9b7-c7bb-4996-a92d-18e90667d61e req-a89779c6-0cbc-44ee-af6f-3dca7d09d9fd service nova] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Refreshing instance network info cache due to event network-changed-51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 901.693768] env[61978]: DEBUG oslo_concurrency.lockutils [req-fef6e9b7-c7bb-4996-a92d-18e90667d61e req-a89779c6-0cbc-44ee-af6f-3dca7d09d9fd service nova] Acquiring lock "refresh_cache-2f5b06f6-7178-4fdf-93b6-65477f020898" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.693913] env[61978]: DEBUG oslo_concurrency.lockutils [req-fef6e9b7-c7bb-4996-a92d-18e90667d61e req-a89779c6-0cbc-44ee-af6f-3dca7d09d9fd service nova] Acquired lock "refresh_cache-2f5b06f6-7178-4fdf-93b6-65477f020898" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.694088] env[61978]: DEBUG nova.network.neutron [req-fef6e9b7-c7bb-4996-a92d-18e90667d61e req-a89779c6-0cbc-44ee-af6f-3dca7d09d9fd service nova] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Refreshing network info cache for port 51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 901.707431] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 901.707431] env[61978]: value = "task-1394751" [ 901.707431] env[61978]: _type = "Task" [ 901.707431] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.710901] env[61978]: DEBUG oslo_vmware.api [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394750, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.724815] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394751, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.068741] env[61978]: DEBUG oslo_concurrency.lockutils [req-ff62d578-e51a-47e0-bf3c-f1c491d1c995 req-784890e2-5beb-4983-959f-6fa0764c2653 service nova] Releasing lock "refresh_cache-9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.090412] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': task-1394748, 'name': ReconfigVM_Task, 'duration_secs': 0.947869} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.090714] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Reconfigured VM instance instance-00000015 to attach disk [datastore2] 3ddf7322-5504-408f-af6c-af73fb1c4286/3ddf7322-5504-408f-af6c-af73fb1c4286.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 902.091485] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-776cecf0-63e7-40a0-bacd-afa294281039 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.099472] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Waiting for the task: (returnval){ [ 902.099472] env[61978]: value = "task-1394752" [ 902.099472] env[61978]: _type = "Task" [ 902.099472] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.113559] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': task-1394752, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.144175] env[61978]: DEBUG nova.network.neutron [req-7dbe42ad-dead-4ed7-8e22-faa2d7dbee39 req-17ffb744-39e1-44f9-a291-d3476d4617b4 service nova] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Updated VIF entry in instance network info cache for port f28bab30-b505-494a-97cf-e0d85ff19cda. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 902.146364] env[61978]: DEBUG nova.network.neutron [req-7dbe42ad-dead-4ed7-8e22-faa2d7dbee39 req-17ffb744-39e1-44f9-a291-d3476d4617b4 service nova] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Updating instance_info_cache with network_info: [{"id": "f28bab30-b505-494a-97cf-e0d85ff19cda", "address": "fa:16:3e:46:17:c4", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf28bab30-b5", "ovs_interfaceid": "f28bab30-b505-494a-97cf-e0d85ff19cda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.147975] env[61978]: DEBUG nova.compute.utils [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 902.149762] env[61978]: DEBUG nova.compute.manager [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 902.149949] env[61978]: DEBUG nova.network.neutron [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 902.211309] env[61978]: DEBUG oslo_vmware.api [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394750, 'name': PowerOnVM_Task, 'duration_secs': 0.595137} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.211309] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 902.211309] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d99c0fdd-42a8-430e-9bb5-2cdb2a9987f5 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Updating instance 'b26a4784-698d-477a-8db7-58156899d231' progress to 100 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 902.222590] env[61978]: DEBUG nova.policy [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e0391af85174f0393c20bb2f49738f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6337204785bd4d7d98711964c1823f52', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 902.229096] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394751, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.615937] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': task-1394752, 'name': Rename_Task, 'duration_secs': 0.140133} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.616477] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 902.616756] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7b975628-28cb-4e2c-a177-c60771af0b89 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.625766] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Waiting for the task: (returnval){ [ 902.625766] env[61978]: value = "task-1394753" [ 902.625766] env[61978]: _type = "Task" [ 902.625766] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.634726] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': task-1394753, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.647487] env[61978]: DEBUG oslo_concurrency.lockutils [req-7dbe42ad-dead-4ed7-8e22-faa2d7dbee39 req-17ffb744-39e1-44f9-a291-d3476d4617b4 service nova] Releasing lock "refresh_cache-76dff032-a806-4910-a48b-8850b05131c1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.656497] env[61978]: DEBUG nova.compute.manager [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 902.660322] env[61978]: DEBUG oslo_concurrency.lockutils [None req-831e8a0f-c49c-43c0-bd58-27fba5eb284d tempest-ServersAdmin275Test-248156959 tempest-ServersAdmin275Test-248156959-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.663907] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 25.604s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.664185] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.664426] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 902.664805] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.273s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.666397] env[61978]: INFO nova.compute.claims [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 902.669844] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb7ff97-fcf3-47fa-83ed-021ecc51bd78 {{(pid=61978) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.680937] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf90a2d-44d5-4c31-a520-7bea89d48d98 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.699175] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce2bfc3-d66a-4772-a71c-b261c5903519 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.708988] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a4410d-9d2a-43ca-91b7-81c4a6bbaddc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.756259] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179673MB free_disk=185GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 902.756436] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.760896] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394751, 'name': ReconfigVM_Task, 'duration_secs': 0.588009} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.761445] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Reconfigured VM instance instance-00000016 to attach disk [datastore2] 76dff032-a806-4910-a48b-8850b05131c1/76dff032-a806-4910-a48b-8850b05131c1.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 902.762131] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-04a77471-f60c-4ab7-b532-213378571728 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.771139] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 902.771139] env[61978]: value = "task-1394754" [ 902.771139] env[61978]: _type = "Task" [ 902.771139] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.781787] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394754, 'name': Rename_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.965229] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5043bb7e-54e2-4fba-845f-8bb6600f20d6 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquiring lock "a0762952-2afd-448a-8e46-ba788a4ca131" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.967301] env[61978]: DEBUG nova.network.neutron [req-fef6e9b7-c7bb-4996-a92d-18e90667d61e req-a89779c6-0cbc-44ee-af6f-3dca7d09d9fd service nova] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Updated VIF entry in instance network info cache for port 51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 902.967681] env[61978]: DEBUG nova.network.neutron [req-fef6e9b7-c7bb-4996-a92d-18e90667d61e req-a89779c6-0cbc-44ee-af6f-3dca7d09d9fd service nova] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Updating instance_info_cache with network_info: [{"id": "51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f", "address": "fa:16:3e:7d:82:2a", "network": {"id": "0d7f3ee4-9789-4ca3-802e-627a975ec68f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-286598400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8499568efa184c35a99e2959f04273fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51cc84cf-63", "ovs_interfaceid": "51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.136564] env[61978]: DEBUG oslo_vmware.api [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': task-1394753, 'name': PowerOnVM_Task, 'duration_secs': 0.496997} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.136859] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 903.137074] env[61978]: INFO nova.compute.manager [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Took 10.31 seconds to spawn the instance on the hypervisor. [ 903.137278] env[61978]: DEBUG nova.compute.manager [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 903.138100] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3df7bac-653b-4564-ac7b-0024b647400f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.180275] env[61978]: DEBUG nova.network.neutron [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Successfully created port: 78b26e7f-9b45-42b2-8950-c83ae8b8b32f {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 903.285729] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394754, 'name': Rename_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.472582] env[61978]: DEBUG oslo_concurrency.lockutils [req-fef6e9b7-c7bb-4996-a92d-18e90667d61e req-a89779c6-0cbc-44ee-af6f-3dca7d09d9fd service nova] Releasing lock "refresh_cache-2f5b06f6-7178-4fdf-93b6-65477f020898" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.664281] env[61978]: INFO nova.compute.manager [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Took 34.76 seconds to build instance. [ 903.667741] env[61978]: DEBUG nova.compute.manager [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 903.704075] env[61978]: DEBUG nova.virt.hardware [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 903.704262] env[61978]: DEBUG nova.virt.hardware [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 903.704618] env[61978]: DEBUG nova.virt.hardware [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 903.704697] env[61978]: DEBUG nova.virt.hardware [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 903.704910] env[61978]: DEBUG nova.virt.hardware [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 903.704986] env[61978]: DEBUG nova.virt.hardware [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 903.705237] env[61978]: DEBUG nova.virt.hardware [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 903.705425] env[61978]: DEBUG nova.virt.hardware [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 903.705901] env[61978]: DEBUG nova.virt.hardware [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 903.705901] env[61978]: DEBUG nova.virt.hardware [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 903.706130] env[61978]: DEBUG nova.virt.hardware [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 903.707195] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ed526b-a40f-4c77-a224-a37edfc15e80 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.722719] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62fa1be-ede1-4b62-9737-47450720eafd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.748389] env[61978]: DEBUG nova.network.neutron [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Successfully updated port: 66a47730-43bc-4ac1-b494-0ec1041be9d2 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 903.788427] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394754, 'name': Rename_Task, 'duration_secs': 0.796898} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.788698] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 903.791589] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91efa2de-adc3-4c70-bf7e-4ef7380d5fbc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.803111] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 903.803111] env[61978]: value = "task-1394755" [ 903.803111] env[61978]: _type = "Task" [ 903.803111] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.818730] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394755, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.169081] env[61978]: DEBUG oslo_concurrency.lockutils [None req-729dc1f7-75f6-4295-80d8-eb4c7f68219b tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Lock "3ddf7322-5504-408f-af6c-af73fb1c4286" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.935s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.251507] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "refresh_cache-50788030-4dc2-4215-bf2c-acba5dd33ce4" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.251507] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired lock "refresh_cache-50788030-4dc2-4215-bf2c-acba5dd33ce4" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.251507] env[61978]: DEBUG nova.network.neutron [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 904.254029] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db163cf5-76ad-43f0-b2d7-ff1713f156f8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.263817] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3dd51a5-5882-422c-b72e-e04e45b54bf6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.300212] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b4b895d-0038-41a6-8a59-29e2b6cd3220 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.316459] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68352f16-bbf6-4b4e-934a-57e0a927b60d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.324693] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394755, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.336023] env[61978]: DEBUG nova.compute.provider_tree [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.674397] env[61978]: DEBUG nova.compute.manager [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 904.799838] env[61978]: DEBUG nova.compute.manager [req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Received event network-changed-a5290cfd-6d88-4c49-a54c-626d4c4843bd {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 904.799838] env[61978]: DEBUG nova.compute.manager [req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Refreshing instance network info cache due to event network-changed-a5290cfd-6d88-4c49-a54c-626d4c4843bd. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 904.799838] env[61978]: DEBUG oslo_concurrency.lockutils [req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] Acquiring lock "refresh_cache-9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.800069] env[61978]: DEBUG oslo_concurrency.lockutils [req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] Acquired lock "refresh_cache-9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.800124] env[61978]: DEBUG nova.network.neutron [req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Refreshing network info cache for port a5290cfd-6d88-4c49-a54c-626d4c4843bd {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 904.818095] env[61978]: DEBUG oslo_vmware.api [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394755, 'name': PowerOnVM_Task, 'duration_secs': 0.827403} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.818892] env[61978]: DEBUG nova.network.neutron [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 904.820903] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 904.821229] env[61978]: INFO nova.compute.manager [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Took 9.16 seconds to spawn the instance on the hypervisor. [ 904.821433] env[61978]: DEBUG nova.compute.manager [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 904.822507] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0fd0e7-96c1-47e6-b8f9-01f0f2df3262 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.845450] env[61978]: DEBUG nova.scheduler.client.report [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 904.999497] env[61978]: DEBUG oslo_concurrency.lockutils [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Acquiring lock "3ddf7322-5504-408f-af6c-af73fb1c4286" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.999786] env[61978]: DEBUG oslo_concurrency.lockutils [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Lock "3ddf7322-5504-408f-af6c-af73fb1c4286" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.000007] env[61978]: DEBUG oslo_concurrency.lockutils [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Acquiring lock "3ddf7322-5504-408f-af6c-af73fb1c4286-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.000357] env[61978]: DEBUG oslo_concurrency.lockutils [None 
req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Lock "3ddf7322-5504-408f-af6c-af73fb1c4286-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.000357] env[61978]: DEBUG oslo_concurrency.lockutils [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Lock "3ddf7322-5504-408f-af6c-af73fb1c4286-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.002710] env[61978]: INFO nova.compute.manager [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Terminating instance [ 905.005441] env[61978]: DEBUG nova.compute.manager [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 905.005660] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 905.006489] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e304daa3-c5b6-407a-8c70-c6f0fcbda550 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.015930] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 905.016205] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-184c3f5e-9bae-41d6-830d-72365ab15b0e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.024892] env[61978]: DEBUG oslo_vmware.api [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Waiting for the task: (returnval){ [ 905.024892] env[61978]: value = "task-1394756" [ 905.024892] env[61978]: _type = "Task" [ 905.024892] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.035594] env[61978]: DEBUG oslo_vmware.api [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': task-1394756, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.209091] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.302527] env[61978]: DEBUG nova.network.neutron [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Successfully updated port: 78b26e7f-9b45-42b2-8950-c83ae8b8b32f {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 905.354209] env[61978]: INFO nova.compute.manager [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Took 36.05 seconds to build instance. [ 905.355739] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.691s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.356258] env[61978]: DEBUG nova.compute.manager [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 905.363430] env[61978]: DEBUG nova.network.neutron [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Updating instance_info_cache with network_info: [{"id": "66a47730-43bc-4ac1-b494-0ec1041be9d2", "address": "fa:16:3e:bd:71:96", "network": {"id": "bc368623-cfb2-43fb-a7d6-8dd238bd961d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1360202280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d7394d965f94155a34dd0ecc0957649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66a47730-43", "ovs_interfaceid": "66a47730-43bc-4ac1-b494-0ec1041be9d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.364702] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.937s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.366904] env[61978]: INFO nova.compute.claims [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 905.553759] env[61978]: DEBUG oslo_vmware.api [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': task-1394756, 'name': PowerOffVM_Task, 'duration_secs': 0.194464} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.557341] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 905.557341] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 905.557341] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0b62d931-a69b-4fcb-8e32-c029ebdd870b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.702220] env[61978]: DEBUG nova.network.neutron [req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Updated VIF entry in instance network info cache for port a5290cfd-6d88-4c49-a54c-626d4c4843bd. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 905.702463] env[61978]: DEBUG nova.network.neutron [req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Updating instance_info_cache with network_info: [{"id": "a5290cfd-6d88-4c49-a54c-626d4c4843bd", "address": "fa:16:3e:ed:43:9e", "network": {"id": "4b7af29e-cc4a-4765-8d05-6adf88573ad3", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1006801081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00c674bbf1e945ba946d844f9856fdfc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b67e519-46cf-44ce-b670-4ba4c0c5b658", "external-id": "nsx-vlan-transportzone-110", "segmentation_id": 110, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5290cfd-6d", "ovs_interfaceid": "a5290cfd-6d88-4c49-a54c-626d4c4843bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.808648] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Acquiring lock "refresh_cache-bb0c149c-920e-47c4-a960-47b2fb443431" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.809069] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 
tempest-DeleteServersAdminTestJSON-1900111298-project-member] Acquired lock "refresh_cache-bb0c149c-920e-47c4-a960-47b2fb443431" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.809069] env[61978]: DEBUG nova.network.neutron [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 905.862035] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44b0e64a-34ae-4856-87be-16f00061192f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "76dff032-a806-4910-a48b-8850b05131c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.933s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.871131] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Releasing lock "refresh_cache-50788030-4dc2-4215-bf2c-acba5dd33ce4" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.871292] env[61978]: DEBUG nova.compute.manager [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Instance network_info: |[{"id": "66a47730-43bc-4ac1-b494-0ec1041be9d2", "address": "fa:16:3e:bd:71:96", "network": {"id": "bc368623-cfb2-43fb-a7d6-8dd238bd961d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1360202280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d7394d965f94155a34dd0ecc0957649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66a47730-43", "ovs_interfaceid": "66a47730-43bc-4ac1-b494-0ec1041be9d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 905.872611] env[61978]: DEBUG nova.compute.utils [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 905.874788] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 
50788030-4dc2-4215-bf2c-acba5dd33ce4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:71:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e4c8c8fd-baca-4e60-97dc-ff0418d63215', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66a47730-43bc-4ac1-b494-0ec1041be9d2', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 905.883152] env[61978]: DEBUG oslo.service.loopingcall [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 905.883484] env[61978]: DEBUG nova.compute.manager [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 905.884352] env[61978]: DEBUG nova.network.neutron [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 905.889351] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 905.892785] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0c973ad-9531-4cd8-937f-db20a6b6ed5f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.912585] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 905.912899] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 905.912986] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Deleting the datastore file [datastore2] 3ddf7322-5504-408f-af6c-af73fb1c4286 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 905.913846] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d785ac5-0dfc-4395-90dc-c2239a4f2f63 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.919687] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 905.919687] env[61978]: value = "task-1394758" [ 905.919687] 
env[61978]: _type = "Task" [ 905.919687] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.925117] env[61978]: DEBUG oslo_vmware.api [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Waiting for the task: (returnval){ [ 905.925117] env[61978]: value = "task-1394759" [ 905.925117] env[61978]: _type = "Task" [ 905.925117] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.931071] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394758, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.939987] env[61978]: DEBUG oslo_vmware.api [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': task-1394759, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.022213] env[61978]: DEBUG nova.policy [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7026a28592af41ebb4dd7df6cfa33feb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2af733ffc4384fa1a2c59f4a45f1778c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 906.207854] env[61978]: DEBUG oslo_concurrency.lockutils [req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] Releasing lock "refresh_cache-9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.207854] env[61978]: DEBUG nova.compute.manager [req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Received event network-vif-plugged-66a47730-43bc-4ac1-b494-0ec1041be9d2 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 906.208389] env[61978]: DEBUG oslo_concurrency.lockutils [req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] Acquiring lock "50788030-4dc2-4215-bf2c-acba5dd33ce4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.209168] env[61978]: DEBUG oslo_concurrency.lockutils [req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] Lock "50788030-4dc2-4215-bf2c-acba5dd33ce4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.209168] env[61978]: DEBUG oslo_concurrency.lockutils 
[req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] Lock "50788030-4dc2-4215-bf2c-acba5dd33ce4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.210754] env[61978]: DEBUG nova.compute.manager [req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] No waiting events found dispatching network-vif-plugged-66a47730-43bc-4ac1-b494-0ec1041be9d2 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 906.210754] env[61978]: WARNING nova.compute.manager [req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Received unexpected event network-vif-plugged-66a47730-43bc-4ac1-b494-0ec1041be9d2 for instance with vm_state building and task_state spawning. [ 906.210754] env[61978]: DEBUG nova.compute.manager [req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Received event network-changed-66a47730-43bc-4ac1-b494-0ec1041be9d2 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 906.210754] env[61978]: DEBUG nova.compute.manager [req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Refreshing instance network info cache due to event network-changed-66a47730-43bc-4ac1-b494-0ec1041be9d2. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 906.210754] env[61978]: DEBUG oslo_concurrency.lockutils [req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] Acquiring lock "refresh_cache-50788030-4dc2-4215-bf2c-acba5dd33ce4" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.210929] env[61978]: DEBUG oslo_concurrency.lockutils [req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] Acquired lock "refresh_cache-50788030-4dc2-4215-bf2c-acba5dd33ce4" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.211122] env[61978]: DEBUG nova.network.neutron [req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Refreshing network info cache for port 66a47730-43bc-4ac1-b494-0ec1041be9d2 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 906.237421] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.237421] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3" 
acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.365520] env[61978]: DEBUG nova.compute.manager [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 906.377163] env[61978]: DEBUG nova.compute.manager [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 906.394075] env[61978]: DEBUG nova.network.neutron [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 906.430818] env[61978]: DEBUG nova.network.neutron [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Successfully created port: 394a8251-684b-4ddc-ae5c-7ef7ec06b503 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 906.452947] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394758, 'name': CreateVM_Task, 'duration_secs': 0.382492} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.453490] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 906.453959] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.454400] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.454570] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 906.458076] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abf43a09-a0c5-4527-bd9f-a49035103cdd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.460565] env[61978]: DEBUG oslo_vmware.api [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Task: {'id': task-1394759, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170615} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.463308] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 906.463447] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 906.463626] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 906.463908] env[61978]: INFO nova.compute.manager [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Took 1.46 seconds to destroy the instance on the hypervisor. [ 906.464042] env[61978]: DEBUG oslo.service.loopingcall [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 906.468698] env[61978]: DEBUG nova.compute.manager [-] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 906.468804] env[61978]: DEBUG nova.network.neutron [-] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 906.471773] env[61978]: DEBUG nova.compute.manager [req-46866986-6501-42e9-9686-cc85890b18ac req-a7e7df45-4112-4b2d-b5ec-52078f77d8c7 service nova] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Received event network-vif-plugged-78b26e7f-9b45-42b2-8950-c83ae8b8b32f {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 906.472448] env[61978]: DEBUG oslo_concurrency.lockutils [req-46866986-6501-42e9-9686-cc85890b18ac req-a7e7df45-4112-4b2d-b5ec-52078f77d8c7 service nova] Acquiring lock "bb0c149c-920e-47c4-a960-47b2fb443431-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.472448] env[61978]: DEBUG oslo_concurrency.lockutils [req-46866986-6501-42e9-9686-cc85890b18ac req-a7e7df45-4112-4b2d-b5ec-52078f77d8c7 service nova] Lock "bb0c149c-920e-47c4-a960-47b2fb443431-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.472448] env[61978]: DEBUG oslo_concurrency.lockutils [req-46866986-6501-42e9-9686-cc85890b18ac req-a7e7df45-4112-4b2d-b5ec-52078f77d8c7 service nova] Lock "bb0c149c-920e-47c4-a960-47b2fb443431-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.472593] env[61978]: DEBUG nova.compute.manager [req-46866986-6501-42e9-9686-cc85890b18ac req-a7e7df45-4112-4b2d-b5ec-52078f77d8c7 service nova] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] No waiting events found dispatching network-vif-plugged-78b26e7f-9b45-42b2-8950-c83ae8b8b32f {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 906.472862] env[61978]: WARNING nova.compute.manager [req-46866986-6501-42e9-9686-cc85890b18ac req-a7e7df45-4112-4b2d-b5ec-52078f77d8c7 service nova] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Received unexpected event network-vif-plugged-78b26e7f-9b45-42b2-8950-c83ae8b8b32f for instance with vm_state building and task_state spawning. [ 906.475400] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 906.475400] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52510ec0-7349-d39f-fb8c-f96fddd55eed" [ 906.475400] env[61978]: _type = "Task" [ 906.475400] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.489712] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52510ec0-7349-d39f-fb8c-f96fddd55eed, 'name': SearchDatastore_Task, 'duration_secs': 0.010967} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.492117] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.492473] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 906.492604] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.492732] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.492904] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 906.493183] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b47da645-ef07-4898-8757-a929d66139c2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.505480] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 906.505583] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 906.506333] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-001eec86-f1bf-4fac-a184-2b55e01fab8f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.516155] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 906.516155] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c3f4b3-7dfb-2b66-7e69-ce4f6f51b424" [ 906.516155] env[61978]: _type = "Task" [ 906.516155] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.526961] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c3f4b3-7dfb-2b66-7e69-ce4f6f51b424, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.664648] env[61978]: DEBUG oslo_concurrency.lockutils [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Acquiring lock "dd686727-fc33-4dc4-b386-aabec27cf215" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.664864] env[61978]: DEBUG oslo_concurrency.lockutils [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Lock "dd686727-fc33-4dc4-b386-aabec27cf215" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.759778] env[61978]: DEBUG nova.network.neutron [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Updating instance_info_cache with network_info: [{"id": "78b26e7f-9b45-42b2-8950-c83ae8b8b32f", "address": "fa:16:3e:4f:59:53", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.76", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78b26e7f-9b", "ovs_interfaceid": 
"78b26e7f-9b45-42b2-8950-c83ae8b8b32f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.890796] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.924856] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade79a31-42e7-4df7-855a-b29ea0ca2e51 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.935714] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f52064-677d-42aa-83cd-e15fbf2a0cd1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.976630] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a1697a46-49fd-4cde-b8a0-4a294ff7bd92 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "b26a4784-698d-477a-8db7-58156899d231" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.976873] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a1697a46-49fd-4cde-b8a0-4a294ff7bd92 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "b26a4784-698d-477a-8db7-58156899d231" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.977067] env[61978]: DEBUG nova.compute.manager [None req-a1697a46-49fd-4cde-b8a0-4a294ff7bd92 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Going to confirm migration 1 {{(pid=61978) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 906.983076] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea95a5b5-a258-45ef-b33c-9d9d9d2b91fa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.991409] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d6b7cfb-d06f-4318-9958-201e55ffb04c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.010200] env[61978]: DEBUG nova.compute.provider_tree [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 907.030478] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 
tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c3f4b3-7dfb-2b66-7e69-ce4f6f51b424, 'name': SearchDatastore_Task, 'duration_secs': 0.008839} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.031381] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-810e17f6-5046-4b8e-b712-06bdeabaa370 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.037729] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 907.037729] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]526b0144-0082-038a-84a7-ae35beaf242d" [ 907.037729] env[61978]: _type = "Task" [ 907.037729] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.046024] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]526b0144-0082-038a-84a7-ae35beaf242d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.093970] env[61978]: DEBUG nova.network.neutron [req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Updated VIF entry in instance network info cache for port 66a47730-43bc-4ac1-b494-0ec1041be9d2. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 907.094491] env[61978]: DEBUG nova.network.neutron [req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Updating instance_info_cache with network_info: [{"id": "66a47730-43bc-4ac1-b494-0ec1041be9d2", "address": "fa:16:3e:bd:71:96", "network": {"id": "bc368623-cfb2-43fb-a7d6-8dd238bd961d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1360202280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d7394d965f94155a34dd0ecc0957649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66a47730-43", "ovs_interfaceid": "66a47730-43bc-4ac1-b494-0ec1041be9d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.262267] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Releasing lock "refresh_cache-bb0c149c-920e-47c4-a960-47b2fb443431" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.262695] env[61978]: DEBUG nova.compute.manager [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Instance network_info: |[{"id": "78b26e7f-9b45-42b2-8950-c83ae8b8b32f", "address": "fa:16:3e:4f:59:53", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.76", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78b26e7f-9b", "ovs_interfaceid": "78b26e7f-9b45-42b2-8950-c83ae8b8b32f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 907.263215] env[61978]: DEBUG 
nova.virt.vmwareapi.vmops [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:59:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27138a4c-60c9-45fb-bf37-4c2f765315a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '78b26e7f-9b45-42b2-8950-c83ae8b8b32f', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 907.270525] env[61978]: DEBUG oslo.service.loopingcall [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 907.270750] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 907.270971] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-038cb63d-47b9-439f-8988-187bb6e274dd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.298997] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 907.298997] env[61978]: value = "task-1394760" [ 907.298997] env[61978]: _type = "Task" [ 907.298997] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.309780] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394760, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.389427] env[61978]: DEBUG nova.compute.manager [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 907.419368] env[61978]: DEBUG nova.virt.hardware [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 907.419672] env[61978]: DEBUG nova.virt.hardware [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 907.419872] env[61978]: DEBUG nova.virt.hardware [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 907.420136] env[61978]: DEBUG nova.virt.hardware [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 907.420326] env[61978]: DEBUG nova.virt.hardware [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 907.420523] env[61978]: DEBUG nova.virt.hardware [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 907.420863] env[61978]: DEBUG nova.virt.hardware [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 907.421084] env[61978]: DEBUG nova.virt.hardware [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 907.421276] env[61978]: DEBUG nova.virt.hardware [None 
req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 907.421451] env[61978]: DEBUG nova.virt.hardware [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 907.421638] env[61978]: DEBUG nova.virt.hardware [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 907.422522] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6822a3d9-fb1a-4d3b-8e42-75302c0f9e6d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.432384] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be24be0c-29ab-43a9-8226-da4437524190 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.487874] env[61978]: DEBUG nova.network.neutron [-] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.512999] env[61978]: DEBUG nova.scheduler.client.report [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 907.551340] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]526b0144-0082-038a-84a7-ae35beaf242d, 'name': SearchDatastore_Task, 'duration_secs': 0.009593} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.551722] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.552023] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 50788030-4dc2-4215-bf2c-acba5dd33ce4/50788030-4dc2-4215-bf2c-acba5dd33ce4.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 907.552337] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-016070b0-4145-43cd-bfa5-450aca95b6e8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.563192] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 907.563192] env[61978]: value = "task-1394761" [ 907.563192] env[61978]: _type = "Task" [ 907.563192] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.573568] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394761, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.597631] env[61978]: DEBUG oslo_concurrency.lockutils [req-bb70ca89-30c4-415e-9de3-8987eca68713 req-0b181f5c-2d16-4a5a-a248-2c17e18a08c4 service nova] Releasing lock "refresh_cache-50788030-4dc2-4215-bf2c-acba5dd33ce4" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.620873] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a1697a46-49fd-4cde-b8a0-4a294ff7bd92 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "refresh_cache-b26a4784-698d-477a-8db7-58156899d231" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.621145] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a1697a46-49fd-4cde-b8a0-4a294ff7bd92 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquired lock "refresh_cache-b26a4784-698d-477a-8db7-58156899d231" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.621372] env[61978]: DEBUG nova.network.neutron [None req-a1697a46-49fd-4cde-b8a0-4a294ff7bd92 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 907.621610] env[61978]: DEBUG nova.objects.instance [None req-a1697a46-49fd-4cde-b8a0-4a294ff7bd92 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lazy-loading 'info_cache' on Instance uuid b26a4784-698d-477a-8db7-58156899d231 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 907.816075] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394760, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.992691] env[61978]: INFO nova.compute.manager [-] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Took 1.52 seconds to deallocate network for instance. [ 908.019288] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.654s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.021437] env[61978]: DEBUG nova.compute.manager [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 908.024765] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.789s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.025267] env[61978]: DEBUG nova.objects.instance [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Lazy-loading 'resources' on Instance uuid 96a38ed0-c880-4f21-9389-99f039279072 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 908.054149] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "3a30ecc4-455f-49cf-98e8-d38be6a1c5a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.054587] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "3a30ecc4-455f-49cf-98e8-d38be6a1c5a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.075704] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394761, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509719} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.075939] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 50788030-4dc2-4215-bf2c-acba5dd33ce4/50788030-4dc2-4215-bf2c-acba5dd33ce4.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 908.076288] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 908.076576] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f521eae9-e5e0-4f8a-8098-52f613adbd78 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.085811] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 908.085811] env[61978]: value = "task-1394762" [ 908.085811] env[61978]: _type = "Task" [ 908.085811] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.094879] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394762, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.141136] env[61978]: DEBUG nova.compute.manager [req-e99e980c-c517-492e-b61a-09c0790b5dd1 req-df6b8a94-aeda-41cc-b1f8-944dc4e8e6d8 service nova] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Received event network-changed-51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 908.141391] env[61978]: DEBUG nova.compute.manager [req-e99e980c-c517-492e-b61a-09c0790b5dd1 req-df6b8a94-aeda-41cc-b1f8-944dc4e8e6d8 service nova] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Refreshing instance network info cache due to event network-changed-51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 908.141805] env[61978]: DEBUG oslo_concurrency.lockutils [req-e99e980c-c517-492e-b61a-09c0790b5dd1 req-df6b8a94-aeda-41cc-b1f8-944dc4e8e6d8 service nova] Acquiring lock "refresh_cache-2f5b06f6-7178-4fdf-93b6-65477f020898" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.141805] env[61978]: DEBUG oslo_concurrency.lockutils [req-e99e980c-c517-492e-b61a-09c0790b5dd1 req-df6b8a94-aeda-41cc-b1f8-944dc4e8e6d8 service nova] Acquired lock "refresh_cache-2f5b06f6-7178-4fdf-93b6-65477f020898" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.141913] env[61978]: DEBUG nova.network.neutron [req-e99e980c-c517-492e-b61a-09c0790b5dd1 req-df6b8a94-aeda-41cc-b1f8-944dc4e8e6d8 service nova] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Refreshing network info cache for port 51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 908.312092] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394760, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.504817] env[61978]: DEBUG oslo_concurrency.lockutils [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.530320] env[61978]: DEBUG nova.compute.utils [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 908.535827] env[61978]: DEBUG nova.compute.manager [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 908.536014] env[61978]: DEBUG nova.network.neutron [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 908.598958] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394762, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076112} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.600284] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 908.601471] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8624610-9b17-4449-8298-1a89e5589191 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.638160] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Reconfiguring VM instance instance-00000018 to attach disk [datastore2] 50788030-4dc2-4215-bf2c-acba5dd33ce4/50788030-4dc2-4215-bf2c-acba5dd33ce4.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 908.639879] env[61978]: DEBUG nova.policy [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0215baaec104ef8a361dcc37b11344f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e0b4b9239e545cd8bd19bc98e5fc5b8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 908.645483] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ceaad5f3-5046-454f-9225-3d19cc9e9e9c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.676913] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 908.676913] env[61978]: value = "task-1394763" [ 908.676913] env[61978]: _type = "Task" [ 908.676913] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.690803] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394763, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.725458] env[61978]: DEBUG nova.network.neutron [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Successfully updated port: 394a8251-684b-4ddc-ae5c-7ef7ec06b503 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 908.810779] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb58de92-2978-4873-ac0c-f746b4e4e74c tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "76dff032-a806-4910-a48b-8850b05131c1" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.812337] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb58de92-2978-4873-ac0c-f746b4e4e74c tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "76dff032-a806-4910-a48b-8850b05131c1" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.826215] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394760, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.028685] env[61978]: DEBUG nova.network.neutron [req-e99e980c-c517-492e-b61a-09c0790b5dd1 req-df6b8a94-aeda-41cc-b1f8-944dc4e8e6d8 service nova] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Updated VIF entry in instance network info cache for port 51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 909.029036] env[61978]: DEBUG nova.network.neutron [req-e99e980c-c517-492e-b61a-09c0790b5dd1 req-df6b8a94-aeda-41cc-b1f8-944dc4e8e6d8 service nova] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Updating instance_info_cache with network_info: [{"id": "51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f", "address": "fa:16:3e:7d:82:2a", "network": {"id": "0d7f3ee4-9789-4ca3-802e-627a975ec68f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-286598400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8499568efa184c35a99e2959f04273fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51cc84cf-63", "ovs_interfaceid": "51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.037716] env[61978]: DEBUG nova.compute.manager [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 909.097586] env[61978]: DEBUG nova.compute.manager [req-cca2e90e-5f34-44af-b902-9a389e28a29f req-25f2980d-1283-4cc6-99a6-1bae25f1cad1 service nova] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Received event network-changed-78b26e7f-9b45-42b2-8950-c83ae8b8b32f {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 909.097779] env[61978]: DEBUG nova.compute.manager [req-cca2e90e-5f34-44af-b902-9a389e28a29f req-25f2980d-1283-4cc6-99a6-1bae25f1cad1 service nova] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Refreshing instance network info cache due to event network-changed-78b26e7f-9b45-42b2-8950-c83ae8b8b32f. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 909.097997] env[61978]: DEBUG oslo_concurrency.lockutils [req-cca2e90e-5f34-44af-b902-9a389e28a29f req-25f2980d-1283-4cc6-99a6-1bae25f1cad1 service nova] Acquiring lock "refresh_cache-bb0c149c-920e-47c4-a960-47b2fb443431" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.098513] env[61978]: DEBUG oslo_concurrency.lockutils [req-cca2e90e-5f34-44af-b902-9a389e28a29f req-25f2980d-1283-4cc6-99a6-1bae25f1cad1 service nova] Acquired lock "refresh_cache-bb0c149c-920e-47c4-a960-47b2fb443431" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.098780] env[61978]: DEBUG nova.network.neutron [req-cca2e90e-5f34-44af-b902-9a389e28a29f req-25f2980d-1283-4cc6-99a6-1bae25f1cad1 service nova] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Refreshing network info cache for port 78b26e7f-9b45-42b2-8950-c83ae8b8b32f {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 909.196557] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394763, 'name': ReconfigVM_Task, 'duration_secs': 0.315061} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.197841] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Reconfigured VM instance instance-00000018 to attach disk [datastore2] 50788030-4dc2-4215-bf2c-acba5dd33ce4/50788030-4dc2-4215-bf2c-acba5dd33ce4.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 909.198571] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fdd28744-1b74-419a-95c1-97f01e48b486 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.202395] env[61978]: DEBUG nova.network.neutron [None req-a1697a46-49fd-4cde-b8a0-4a294ff7bd92 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Updating instance_info_cache with network_info: [{"id": "3a886f4f-5f7c-4f97-8f00-2555aebe9856", "address": "fa:16:3e:94:aa:92", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a886f4f-5f", "ovs_interfaceid": "3a886f4f-5f7c-4f97-8f00-2555aebe9856", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.207700] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f48ceb-9747-402b-97bc-26bc4b871601 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.211046] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 909.211046] env[61978]: value = "task-1394764" [ 909.211046] env[61978]: _type = "Task" [ 909.211046] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.217915] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b15c8319-122c-4603-82c8-cd30027c3aa9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.224176] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394764, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.258586] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "refresh_cache-c17c986e-c008-4414-8dd1-4ea836458048" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.258763] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired lock "refresh_cache-c17c986e-c008-4414-8dd1-4ea836458048" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.258963] env[61978]: DEBUG nova.network.neutron [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 909.261870] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91fc980f-1495-4637-82da-d7ea17088918 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.277719] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d89ae7d7-2c42-4611-8db0-51b609d5f9aa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.295942] env[61978]: DEBUG nova.compute.provider_tree [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Inventory has not changed in ProviderTree for provider: 
44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 909.301916] env[61978]: DEBUG oslo_concurrency.lockutils [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "ea1c2d74-70b4-4547-a887-78e291c3082a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.302205] env[61978]: DEBUG oslo_concurrency.lockutils [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "ea1c2d74-70b4-4547-a887-78e291c3082a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.314994] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394760, 'name': CreateVM_Task, 'duration_secs': 1.642392} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.315392] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 909.319220] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.319220] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.319220] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 909.319220] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99381c11-e146-4f1b-8452-ef833cbb1ce9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.321329] env[61978]: DEBUG nova.compute.utils [None req-cb58de92-2978-4873-ac0c-f746b4e4e74c tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 909.327559] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 
tempest-DeleteServersAdminTestJSON-1900111298-project-member] Waiting for the task: (returnval){ [ 909.327559] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5203d298-f2eb-4487-ab75-4deb8bb2b705" [ 909.327559] env[61978]: _type = "Task" [ 909.327559] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.339648] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5203d298-f2eb-4487-ab75-4deb8bb2b705, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.427175] env[61978]: DEBUG nova.network.neutron [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Successfully created port: 6248c9c9-4f43-44c4-a25a-63b0c9920e89 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 909.531993] env[61978]: DEBUG oslo_concurrency.lockutils [req-e99e980c-c517-492e-b61a-09c0790b5dd1 req-df6b8a94-aeda-41cc-b1f8-944dc4e8e6d8 service nova] Releasing lock "refresh_cache-2f5b06f6-7178-4fdf-93b6-65477f020898" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.711528] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a1697a46-49fd-4cde-b8a0-4a294ff7bd92 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Releasing lock "refresh_cache-b26a4784-698d-477a-8db7-58156899d231" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.711890] env[61978]: DEBUG nova.objects.instance [None req-a1697a46-49fd-4cde-b8a0-4a294ff7bd92 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lazy-loading 'migration_context' on Instance uuid b26a4784-698d-477a-8db7-58156899d231 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 909.729723] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394764, 'name': Rename_Task, 'duration_secs': 0.169302} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.730794] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 909.731039] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e0e4f74c-1d3d-431e-b4ed-0040e3c86493 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.739167] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 909.739167] env[61978]: value = "task-1394765" [ 909.739167] env[61978]: _type = "Task" [ 909.739167] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.749242] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394765, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.794910] env[61978]: DEBUG nova.network.neutron [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 909.803133] env[61978]: DEBUG nova.scheduler.client.report [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 909.825030] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb58de92-2978-4873-ac0c-f746b4e4e74c tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "76dff032-a806-4910-a48b-8850b05131c1" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.014s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.843470] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5203d298-f2eb-4487-ab75-4deb8bb2b705, 'name': SearchDatastore_Task, 'duration_secs': 0.011958} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.843470] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.843470] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 909.843593] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.843739] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.843927] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 909.844676] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc0382f1-4565-4691-bcac-0eb775f62861 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.854860] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 909.855178] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 909.856179] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2989b4ba-03ce-496d-b39f-129f06276702 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.863268] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Waiting for the task: (returnval){ [ 909.863268] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52581e5e-e5a6-c5bd-7987-9dd50691cb69" [ 909.863268] env[61978]: _type = "Task" [ 909.863268] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.874904] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52581e5e-e5a6-c5bd-7987-9dd50691cb69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.005995] env[61978]: DEBUG nova.network.neutron [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Updating instance_info_cache with network_info: [{"id": "394a8251-684b-4ddc-ae5c-7ef7ec06b503", "address": "fa:16:3e:5c:ce:8c", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap394a8251-68", "ovs_interfaceid": "394a8251-684b-4ddc-ae5c-7ef7ec06b503", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.052330] env[61978]: DEBUG nova.compute.manager [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 910.072729] env[61978]: DEBUG nova.network.neutron [req-cca2e90e-5f34-44af-b902-9a389e28a29f req-25f2980d-1283-4cc6-99a6-1bae25f1cad1 service nova] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Updated VIF entry in instance network info cache for port 78b26e7f-9b45-42b2-8950-c83ae8b8b32f. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 910.073119] env[61978]: DEBUG nova.network.neutron [req-cca2e90e-5f34-44af-b902-9a389e28a29f req-25f2980d-1283-4cc6-99a6-1bae25f1cad1 service nova] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Updating instance_info_cache with network_info: [{"id": "78b26e7f-9b45-42b2-8950-c83ae8b8b32f", "address": "fa:16:3e:4f:59:53", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.76", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78b26e7f-9b", "ovs_interfaceid": "78b26e7f-9b45-42b2-8950-c83ae8b8b32f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.086938] env[61978]: DEBUG nova.virt.hardware [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 910.087362] env[61978]: DEBUG nova.virt.hardware [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 910.087362] env[61978]: DEBUG nova.virt.hardware [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 
tempest-ServersV294TestFqdnHostnames-632964716-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 910.087856] env[61978]: DEBUG nova.virt.hardware [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 910.087856] env[61978]: DEBUG nova.virt.hardware [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 910.087961] env[61978]: DEBUG nova.virt.hardware [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 910.088205] env[61978]: DEBUG nova.virt.hardware [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 910.088282] env[61978]: DEBUG nova.virt.hardware [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 910.088447] env[61978]: DEBUG nova.virt.hardware [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 910.088610] env[61978]: DEBUG nova.virt.hardware [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 910.088779] env[61978]: DEBUG nova.virt.hardware [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 910.090384] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c82897-af8c-458d-9aca-d9021f4520df {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.100156] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b0b747f-0636-435e-8dd6-e5395ed357d8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.222768] 
env[61978]: DEBUG nova.objects.base [None req-a1697a46-49fd-4cde-b8a0-4a294ff7bd92 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 910.224027] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5000d5a1-1daf-44f7-b7e9-cd21289c6cea {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.247712] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c631f135-5d1b-49d6-91a6-810b5daa79c7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.256737] env[61978]: DEBUG oslo_vmware.api [None req-a1697a46-49fd-4cde-b8a0-4a294ff7bd92 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 910.256737] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ba260c-a305-4de7-0091-2d8aaede4343" [ 910.256737] env[61978]: _type = "Task" [ 910.256737] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.256995] env[61978]: DEBUG oslo_vmware.api [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394765, 'name': PowerOnVM_Task, 'duration_secs': 0.505202} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.257342] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 910.257567] env[61978]: INFO nova.compute.manager [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Took 9.24 seconds to spawn the instance on the hypervisor. [ 910.257758] env[61978]: DEBUG nova.compute.manager [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 910.261183] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3970eab8-bada-48b6-9d4d-62381c9742a1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.275023] env[61978]: DEBUG oslo_vmware.api [None req-a1697a46-49fd-4cde-b8a0-4a294ff7bd92 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ba260c-a305-4de7-0091-2d8aaede4343, 'name': SearchDatastore_Task, 'duration_secs': 0.007616} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.275023] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a1697a46-49fd-4cde-b8a0-4a294ff7bd92 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.309025] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.284s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.311053] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.060s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.312851] env[61978]: INFO nova.compute.claims [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 910.346032] env[61978]: INFO nova.scheduler.client.report [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Deleted allocations for instance 96a38ed0-c880-4f21-9389-99f039279072 [ 910.374746] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52581e5e-e5a6-c5bd-7987-9dd50691cb69, 'name': SearchDatastore_Task, 'duration_secs': 0.011589} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.375591] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24a814d4-6265-4737-b7d1-3b9e79921a17 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.382268] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Waiting for the task: (returnval){ [ 910.382268] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5209c1c7-6ccf-aa1c-1129-d0b5b0da7468" [ 910.382268] env[61978]: _type = "Task" [ 910.382268] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.393299] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5209c1c7-6ccf-aa1c-1129-d0b5b0da7468, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.510189] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lock "refresh_cache-c17c986e-c008-4414-8dd1-4ea836458048" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.510613] env[61978]: DEBUG nova.compute.manager [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Instance network_info: |[{"id": "394a8251-684b-4ddc-ae5c-7ef7ec06b503", "address": "fa:16:3e:5c:ce:8c", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap394a8251-68", "ovs_interfaceid": "394a8251-684b-4ddc-ae5c-7ef7ec06b503", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 910.511197] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:ce:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac7039c0-3374-4c08-87fc-af2449b48b02', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '394a8251-684b-4ddc-ae5c-7ef7ec06b503', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 910.523677] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Creating folder: Project (2af733ffc4384fa1a2c59f4a45f1778c). Parent ref: group-v295764. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 910.524093] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8415b56f-d497-4485-998f-6f98bc1ad163 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.551970] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Created folder: Project (2af733ffc4384fa1a2c59f4a45f1778c) in parent group-v295764. [ 910.552334] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Creating folder: Instances. Parent ref: group-v295831. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 910.552604] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7bcff92c-c3ba-40ef-9280-ca9cd8524200 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.564627] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Created folder: Instances in parent group-v295831. [ 910.564951] env[61978]: DEBUG oslo.service.loopingcall [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 910.565279] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 910.565512] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aef4a18f-7b3a-412b-9c88-d1c8e7a54fbc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.585620] env[61978]: DEBUG oslo_concurrency.lockutils [req-cca2e90e-5f34-44af-b902-9a389e28a29f req-25f2980d-1283-4cc6-99a6-1bae25f1cad1 service nova] Releasing lock "refresh_cache-bb0c149c-920e-47c4-a960-47b2fb443431" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.585963] env[61978]: DEBUG nova.compute.manager [req-cca2e90e-5f34-44af-b902-9a389e28a29f req-25f2980d-1283-4cc6-99a6-1bae25f1cad1 service nova] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Received event network-vif-deleted-a4813ec3-c879-4bdb-939f-21a96adfecf2 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 910.586228] env[61978]: DEBUG nova.compute.manager [req-cca2e90e-5f34-44af-b902-9a389e28a29f req-25f2980d-1283-4cc6-99a6-1bae25f1cad1 service nova] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Received event network-vif-plugged-394a8251-684b-4ddc-ae5c-7ef7ec06b503 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 910.586487] env[61978]: DEBUG oslo_concurrency.lockutils [req-cca2e90e-5f34-44af-b902-9a389e28a29f req-25f2980d-1283-4cc6-99a6-1bae25f1cad1 service nova] Acquiring lock 
"c17c986e-c008-4414-8dd1-4ea836458048-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.586748] env[61978]: DEBUG oslo_concurrency.lockutils [req-cca2e90e-5f34-44af-b902-9a389e28a29f req-25f2980d-1283-4cc6-99a6-1bae25f1cad1 service nova] Lock "c17c986e-c008-4414-8dd1-4ea836458048-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.587466] env[61978]: DEBUG oslo_concurrency.lockutils [req-cca2e90e-5f34-44af-b902-9a389e28a29f req-25f2980d-1283-4cc6-99a6-1bae25f1cad1 service nova] Lock "c17c986e-c008-4414-8dd1-4ea836458048-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.587466] env[61978]: DEBUG nova.compute.manager [req-cca2e90e-5f34-44af-b902-9a389e28a29f req-25f2980d-1283-4cc6-99a6-1bae25f1cad1 service nova] [instance: c17c986e-c008-4414-8dd1-4ea836458048] No waiting events found dispatching network-vif-plugged-394a8251-684b-4ddc-ae5c-7ef7ec06b503 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 910.587632] env[61978]: WARNING nova.compute.manager [req-cca2e90e-5f34-44af-b902-9a389e28a29f req-25f2980d-1283-4cc6-99a6-1bae25f1cad1 service nova] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Received unexpected event network-vif-plugged-394a8251-684b-4ddc-ae5c-7ef7ec06b503 for instance with vm_state building and task_state spawning. [ 910.595591] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 910.595591] env[61978]: value = "task-1394768" [ 910.595591] env[61978]: _type = "Task" [ 910.595591] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.607977] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394768, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.784607] env[61978]: INFO nova.compute.manager [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Took 37.01 seconds to build instance. [ 910.854338] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1381da8b-bc20-4b1b-bd4f-ff091ab86870 tempest-ServersAdmin275Test-442011889 tempest-ServersAdmin275Test-442011889-project-member] Lock "96a38ed0-c880-4f21-9389-99f039279072" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.568s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.896061] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5209c1c7-6ccf-aa1c-1129-d0b5b0da7468, 'name': SearchDatastore_Task, 'duration_secs': 0.009512} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.896574] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.896896] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] bb0c149c-920e-47c4-a960-47b2fb443431/bb0c149c-920e-47c4-a960-47b2fb443431.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 910.897416] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9ad16be0-0a34-4451-b3ae-a2f3c61fcd28 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.906903] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Waiting for the task: (returnval){ [ 910.906903] env[61978]: value = "task-1394769" [ 910.906903] env[61978]: _type = "Task" [ 910.906903] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.918155] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb58de92-2978-4873-ac0c-f746b4e4e74c tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "76dff032-a806-4910-a48b-8850b05131c1" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.918456] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb58de92-2978-4873-ac0c-f746b4e4e74c tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "76dff032-a806-4910-a48b-8850b05131c1" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.918738] env[61978]: INFO nova.compute.manager [None req-cb58de92-2978-4873-ac0c-f746b4e4e74c tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Attaching volume 6c6c1425-42c5-4c67-9006-17d5cd5278f1 to /dev/sdb [ 910.924428] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394769, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.992501] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd9529ad-61d1-4d2a-821a-547492119b98 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.001765] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a998a180-843f-476d-a4c8-7f4655b126f7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.027555] env[61978]: DEBUG nova.virt.block_device [None req-cb58de92-2978-4873-ac0c-f746b4e4e74c tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Updating existing volume attachment record: 156c7e2b-34ef-48f2-b59c-4295b6818757 {{(pid=61978) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 911.112154] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394768, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.291778] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f707fa12-ab37-43cc-97b8-65c138d2d587 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "50788030-4dc2-4215-bf2c-acba5dd33ce4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.536s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.428709] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394769, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.5164} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.429287] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] bb0c149c-920e-47c4-a960-47b2fb443431/bb0c149c-920e-47c4-a960-47b2fb443431.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 911.429287] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 911.431633] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bcb9c2a2-0aa8-4f2c-b06d-4354435a8a31 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.440453] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Waiting for the task: (returnval){ [ 911.440453] env[61978]: value = "task-1394772" [ 911.440453] env[61978]: _type = "Task" [ 911.440453] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.457673] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394772, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.556150] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquiring lock "d2614f71-3026-41d4-ae04-eaede9b5ead5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.556499] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Lock "d2614f71-3026-41d4-ae04-eaede9b5ead5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.556743] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquiring lock "d2614f71-3026-41d4-ae04-eaede9b5ead5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.556978] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Lock "d2614f71-3026-41d4-ae04-eaede9b5ead5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.557171] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Lock "d2614f71-3026-41d4-ae04-eaede9b5ead5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.561616] env[61978]: INFO nova.compute.manager [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Terminating instance [ 911.563818] env[61978]: DEBUG nova.compute.manager [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 911.564047] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 911.565274] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d9d200-f46c-4282-b644-025600a8abb6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.582129] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 911.582412] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a5da0bb6-8277-48ef-8fb1-c10a0bb51ba4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.589916] env[61978]: DEBUG nova.network.neutron [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Successfully updated port: 6248c9c9-4f43-44c4-a25a-63b0c9920e89 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 911.597250] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Acquiring lock "2f5b06f6-7178-4fdf-93b6-65477f020898" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.597250] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Lock "2f5b06f6-7178-4fdf-93b6-65477f020898" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.597250] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Acquiring lock "2f5b06f6-7178-4fdf-93b6-65477f020898-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.597601] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Lock "2f5b06f6-7178-4fdf-93b6-65477f020898-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.597601] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Lock "2f5b06f6-7178-4fdf-93b6-65477f020898-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.600169] env[61978]: INFO nova.compute.manager [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Terminating instance [ 911.613889] env[61978]: DEBUG nova.compute.manager [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 911.614196] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 911.615093] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2b9123-6a3d-4f7e-a0a3-65081909e9cf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.631870] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394768, 'name': CreateVM_Task, 'duration_secs': 0.528544} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.631870] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 911.632095] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 911.632882] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.633062] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.633587] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 911.633656] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-960ed345-e7ef-4efc-991f-bf6ab05b6894 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.635665] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-031c3ed1-9649-44fc-8a45-4f67d288e65d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.641741] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 911.641741] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d79c95-8dde-75b2-1702-daf87c34c524" [ 911.641741] env[61978]: _type = "Task" [ 911.641741] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.652035] env[61978]: DEBUG oslo_vmware.api [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Waiting for the task: (returnval){ [ 911.652035] env[61978]: value = "task-1394775" [ 911.652035] env[61978]: _type = "Task" [ 911.652035] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.658194] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d79c95-8dde-75b2-1702-daf87c34c524, 'name': SearchDatastore_Task, 'duration_secs': 0.011047} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.659906] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.660161] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 911.660390] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.660531] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.660712] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 911.661925] env[61978]: DEBUG nova.compute.manager [req-a4423a15-6bf5-4327-87bb-b8c20d830ca7 req-2abdaba0-a4c1-47e2-8c77-e66bf0fd1295 service nova] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Received event network-changed-394a8251-684b-4ddc-ae5c-7ef7ec06b503 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 911.662076] env[61978]: DEBUG nova.compute.manager [req-a4423a15-6bf5-4327-87bb-b8c20d830ca7 req-2abdaba0-a4c1-47e2-8c77-e66bf0fd1295 service nova] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Refreshing instance network info cache due to event network-changed-394a8251-684b-4ddc-ae5c-7ef7ec06b503. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 911.662291] env[61978]: DEBUG oslo_concurrency.lockutils [req-a4423a15-6bf5-4327-87bb-b8c20d830ca7 req-2abdaba0-a4c1-47e2-8c77-e66bf0fd1295 service nova] Acquiring lock "refresh_cache-c17c986e-c008-4414-8dd1-4ea836458048" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.662426] env[61978]: DEBUG oslo_concurrency.lockutils [req-a4423a15-6bf5-4327-87bb-b8c20d830ca7 req-2abdaba0-a4c1-47e2-8c77-e66bf0fd1295 service nova] Acquired lock "refresh_cache-c17c986e-c008-4414-8dd1-4ea836458048" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.662578] env[61978]: DEBUG nova.network.neutron [req-a4423a15-6bf5-4327-87bb-b8c20d830ca7 req-2abdaba0-a4c1-47e2-8c77-e66bf0fd1295 service nova] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Refreshing network info cache for port 394a8251-684b-4ddc-ae5c-7ef7ec06b503 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 911.669344] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf04ca9e-f044-44b1-adcb-cea9d059078d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.673745] env[61978]: DEBUG oslo_vmware.api [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': task-1394775, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.685027] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 911.686019] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 911.686019] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8feb9e0c-07b0-4e6c-a08f-4f003f748f5a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.697128] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 911.697128] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]528a9736-acc5-9670-6f02-4aff34a8e767" [ 911.697128] env[61978]: _type = "Task" [ 911.697128] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.708257] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528a9736-acc5-9670-6f02-4aff34a8e767, 'name': SearchDatastore_Task, 'duration_secs': 0.0108} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.709278] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-330b310e-8ba8-4f1b-b0eb-773f3999a3b7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.719568] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 911.719568] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]522918e9-889a-126c-a5fe-e4b3de6f415e" [ 911.719568] env[61978]: _type = "Task" [ 911.719568] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.730159] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522918e9-889a-126c-a5fe-e4b3de6f415e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.753623] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 911.753867] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 911.754052] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Deleting the datastore file [datastore1] d2614f71-3026-41d4-ae04-eaede9b5ead5 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 911.754364] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-04e6f95c-7472-41d2-af05-842f9838c0d5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.762176] env[61978]: DEBUG oslo_vmware.api [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for the task: (returnval){ [ 911.762176] env[61978]: value = "task-1394776" [ 911.762176] env[61978]: _type = 
"Task" [ 911.762176] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.775315] env[61978]: DEBUG oslo_vmware.api [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394776, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.797947] env[61978]: DEBUG nova.compute.manager [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 911.957809] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394772, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.118005} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.958217] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 911.959045] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f23179-2322-447c-8c3a-39cae4a204f6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.982767] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Reconfiguring VM instance instance-00000017 to attach disk [datastore2] bb0c149c-920e-47c4-a960-47b2fb443431/bb0c149c-920e-47c4-a960-47b2fb443431.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 911.986256] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2db8e8e9-1ce2-4429-94a7-0866ab823408 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.013359] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Waiting for the task: (returnval){ [ 912.013359] env[61978]: value = "task-1394777" [ 912.013359] env[61978]: _type = "Task" [ 912.013359] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.026647] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394777, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.033736] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a82d331-32a4-4837-b2d8-6fdc062e5fd1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.042179] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c759963c-8d5a-4cfc-87f7-dc015d12edca {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.047712] env[61978]: DEBUG nova.network.neutron [req-a4423a15-6bf5-4327-87bb-b8c20d830ca7 req-2abdaba0-a4c1-47e2-8c77-e66bf0fd1295 service nova] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Updated VIF entry in instance network info cache for port 394a8251-684b-4ddc-ae5c-7ef7ec06b503. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 912.048096] env[61978]: DEBUG nova.network.neutron [req-a4423a15-6bf5-4327-87bb-b8c20d830ca7 req-2abdaba0-a4c1-47e2-8c77-e66bf0fd1295 service nova] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Updating instance_info_cache with network_info: [{"id": "394a8251-684b-4ddc-ae5c-7ef7ec06b503", "address": "fa:16:3e:5c:ce:8c", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap394a8251-68", "ovs_interfaceid": "394a8251-684b-4ddc-ae5c-7ef7ec06b503", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.079183] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72dffd5c-45ea-43f5-92da-12e9cf585098 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.088424] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830efed3-575d-40aa-b1bc-472eed169ddb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.093148] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Acquiring lock "refresh_cache-ff793464-9bef-449f-8485-36d3b8fb1d69" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.093315] env[61978]: DEBUG oslo_concurrency.lockutils [None 
req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Acquired lock "refresh_cache-ff793464-9bef-449f-8485-36d3b8fb1d69" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.093469] env[61978]: DEBUG nova.network.neutron [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 912.107025] env[61978]: DEBUG nova.compute.provider_tree [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.164820] env[61978]: DEBUG oslo_vmware.api [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': task-1394775, 'name': PowerOffVM_Task, 'duration_secs': 0.211151} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.164820] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 912.164820] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 912.164820] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f16d35a1-1193-4e40-bc79-bc20c59e70b8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.235016] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522918e9-889a-126c-a5fe-e4b3de6f415e, 'name': SearchDatastore_Task, 'duration_secs': 0.010403} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.235016] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.235016] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] c17c986e-c008-4414-8dd1-4ea836458048/c17c986e-c008-4414-8dd1-4ea836458048.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 912.235016] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9db26d0a-bbb3-4fef-af61-48fa9cf31dc4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.247109] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 912.247109] env[61978]: value = "task-1394779" [ 912.247109] env[61978]: _type = "Task" [ 912.247109] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.258570] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1394779, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.260600] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 912.260600] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 912.260600] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Deleting the datastore file [datastore1] 2f5b06f6-7178-4fdf-93b6-65477f020898 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 912.260600] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dea43ee7-53b4-4f50-976d-042533ca1418 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.278982] env[61978]: DEBUG oslo_concurrency.lockutils [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "8a21e6a7-c34e-4af0-b1fd-8a501694614c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.279296] env[61978]: DEBUG oslo_concurrency.lockutils [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "8a21e6a7-c34e-4af0-b1fd-8a501694614c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.279792] env[61978]: DEBUG oslo_vmware.api [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394776, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.183482} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.280162] env[61978]: DEBUG oslo_vmware.api [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Waiting for the task: (returnval){ [ 912.280162] env[61978]: value = "task-1394780" [ 912.280162] env[61978]: _type = "Task" [ 912.280162] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.280488] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 912.281013] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 912.281252] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 912.281473] env[61978]: INFO nova.compute.manager [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Took 0.72 seconds to destroy the instance on the hypervisor. [ 912.281742] env[61978]: DEBUG oslo.service.loopingcall [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 912.282337] env[61978]: DEBUG nova.compute.manager [-] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 912.282482] env[61978]: DEBUG nova.network.neutron [-] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 912.294394] env[61978]: DEBUG oslo_vmware.api [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': task-1394780, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.332991] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.530201] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394777, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.550665] env[61978]: DEBUG oslo_concurrency.lockutils [req-a4423a15-6bf5-4327-87bb-b8c20d830ca7 req-2abdaba0-a4c1-47e2-8c77-e66bf0fd1295 service nova] Releasing lock "refresh_cache-c17c986e-c008-4414-8dd1-4ea836458048" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.610076] env[61978]: DEBUG nova.scheduler.client.report [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 912.631843] env[61978]: DEBUG nova.network.neutron [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 912.761550] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1394779, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.796621] env[61978]: DEBUG oslo_vmware.api [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Task: {'id': task-1394780, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.343831} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.796780] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 912.796965] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 912.797682] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 912.797682] env[61978]: INFO nova.compute.manager [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Took 1.18 seconds to destroy the instance on the hypervisor. [ 912.797928] env[61978]: DEBUG oslo.service.loopingcall [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 912.798601] env[61978]: DEBUG nova.compute.manager [-] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 912.799142] env[61978]: DEBUG nova.network.neutron [-] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 912.882494] env[61978]: DEBUG nova.network.neutron [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Updating instance_info_cache with network_info: [{"id": "6248c9c9-4f43-44c4-a25a-63b0c9920e89", "address": "fa:16:3e:f1:b2:3a", "network": {"id": "8438071b-a3cd-4e2d-867f-1fa4a67bccca", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2102143534-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e0b4b9239e545cd8bd19bc98e5fc5b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6248c9c9-4f", "ovs_interfaceid": "6248c9c9-4f43-44c4-a25a-63b0c9920e89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.035286] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394777, 'name': ReconfigVM_Task, 'duration_secs': 0.764996} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.035286] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Reconfigured VM instance instance-00000017 to attach disk [datastore2] bb0c149c-920e-47c4-a960-47b2fb443431/bb0c149c-920e-47c4-a960-47b2fb443431.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 913.035286] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dc68614e-04c0-4eb5-88d5-0fd94682b262 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.039242] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Waiting for the task: (returnval){ [ 913.039242] env[61978]: value = "task-1394781" [ 913.039242] env[61978]: _type = "Task" [ 913.039242] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.057100] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394781, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.057100] env[61978]: DEBUG nova.network.neutron [-] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.117346] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.806s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.117991] env[61978]: DEBUG nova.compute.manager [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 913.123104] env[61978]: DEBUG oslo_concurrency.lockutils [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.393s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.123914] env[61978]: INFO nova.compute.claims [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 913.260655] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1394779, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.750962} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.260917] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] c17c986e-c008-4414-8dd1-4ea836458048/c17c986e-c008-4414-8dd1-4ea836458048.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 913.261026] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 913.261296] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f386b98c-d3dd-4d6f-9017-2b1ee316f821 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.269608] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 913.269608] env[61978]: value = "task-1394782" [ 913.269608] env[61978]: _type = "Task" [ 913.269608] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.281602] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1394782, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.324034] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.324034] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.387155] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Releasing lock "refresh_cache-ff793464-9bef-449f-8485-36d3b8fb1d69" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 913.387522] env[61978]: DEBUG nova.compute.manager [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Instance network_info: |[{"id": "6248c9c9-4f43-44c4-a25a-63b0c9920e89", "address": "fa:16:3e:f1:b2:3a", "network": {"id": "8438071b-a3cd-4e2d-867f-1fa4a67bccca", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2102143534-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e0b4b9239e545cd8bd19bc98e5fc5b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6248c9c9-4f", "ovs_interfaceid": "6248c9c9-4f43-44c4-a25a-63b0c9920e89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 913.387944] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:b2:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '462a7219-4deb-4225-9cf7-3131ef280363', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6248c9c9-4f43-44c4-a25a-63b0c9920e89', 
'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 913.398198] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Creating folder: Project (6e0b4b9239e545cd8bd19bc98e5fc5b8). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 913.398854] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24b32358-672e-4118-8415-032d75fea2f2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.414376] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Created folder: Project (6e0b4b9239e545cd8bd19bc98e5fc5b8) in parent group-v295764. [ 913.414704] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Creating folder: Instances. Parent ref: group-v295836. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 913.415100] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-71dc6b48-c1a5-4d1f-b2f5-b73f402d0531 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.430895] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Created folder: Instances in parent group-v295836. [ 913.430895] env[61978]: DEBUG oslo.service.loopingcall [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 913.431097] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 913.431334] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a6bda52a-15d1-4be9-b87c-a009c02c24c4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.453852] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 913.453852] env[61978]: value = "task-1394785" [ 913.453852] env[61978]: _type = "Task" [ 913.453852] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.463069] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394785, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.553463] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394781, 'name': Rename_Task, 'duration_secs': 0.162323} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.555758] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 913.555758] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-02760e9f-4a24-4e22-8b2b-7a1672802edd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.559985] env[61978]: INFO nova.compute.manager [-] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Took 1.28 seconds to deallocate network for instance. [ 913.562458] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Waiting for the task: (returnval){ [ 913.562458] env[61978]: value = "task-1394787" [ 913.562458] env[61978]: _type = "Task" [ 913.562458] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.576869] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394787, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.629440] env[61978]: DEBUG nova.compute.utils [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 913.635612] env[61978]: DEBUG nova.compute.manager [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 913.635612] env[61978]: DEBUG nova.network.neutron [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 913.704628] env[61978]: DEBUG nova.policy [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab0e06df466d46dab52f0e618e1be12a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eece5991af2241b5a2e30c69894be228', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 913.716630] env[61978]: DEBUG nova.network.neutron [-] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.781864] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1394782, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.190623} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.782340] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 913.783167] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62410e5-2232-46f5-8065-8bd440d0ed49 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.809864] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] c17c986e-c008-4414-8dd1-4ea836458048/c17c986e-c008-4414-8dd1-4ea836458048.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 913.810574] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9228af2f-bcef-499a-987c-1907fc0ddfc2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.836563] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 913.836563] env[61978]: value = 
"task-1394788" [ 913.836563] env[61978]: _type = "Task" [ 913.836563] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.841149] env[61978]: DEBUG nova.compute.manager [req-e982790d-5846-40a6-a338-b2d8b8150429 req-bd944b1c-66b8-4c5a-a878-97235ce46152 service nova] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Received event network-vif-plugged-6248c9c9-4f43-44c4-a25a-63b0c9920e89 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 913.841192] env[61978]: DEBUG oslo_concurrency.lockutils [req-e982790d-5846-40a6-a338-b2d8b8150429 req-bd944b1c-66b8-4c5a-a878-97235ce46152 service nova] Acquiring lock "ff793464-9bef-449f-8485-36d3b8fb1d69-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.841472] env[61978]: DEBUG oslo_concurrency.lockutils [req-e982790d-5846-40a6-a338-b2d8b8150429 req-bd944b1c-66b8-4c5a-a878-97235ce46152 service nova] Lock "ff793464-9bef-449f-8485-36d3b8fb1d69-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.841589] env[61978]: DEBUG oslo_concurrency.lockutils [req-e982790d-5846-40a6-a338-b2d8b8150429 req-bd944b1c-66b8-4c5a-a878-97235ce46152 service nova] Lock "ff793464-9bef-449f-8485-36d3b8fb1d69-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.841906] env[61978]: DEBUG nova.compute.manager [req-e982790d-5846-40a6-a338-b2d8b8150429 req-bd944b1c-66b8-4c5a-a878-97235ce46152 service nova] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] No waiting events found dispatching network-vif-plugged-6248c9c9-4f43-44c4-a25a-63b0c9920e89 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 913.842126] env[61978]: WARNING nova.compute.manager [req-e982790d-5846-40a6-a338-b2d8b8150429 req-bd944b1c-66b8-4c5a-a878-97235ce46152 service nova] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Received unexpected event network-vif-plugged-6248c9c9-4f43-44c4-a25a-63b0c9920e89 for instance with vm_state building and task_state spawning. [ 913.842796] env[61978]: DEBUG nova.compute.manager [req-e982790d-5846-40a6-a338-b2d8b8150429 req-bd944b1c-66b8-4c5a-a878-97235ce46152 service nova] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Received event network-changed-6248c9c9-4f43-44c4-a25a-63b0c9920e89 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 913.842975] env[61978]: DEBUG nova.compute.manager [req-e982790d-5846-40a6-a338-b2d8b8150429 req-bd944b1c-66b8-4c5a-a878-97235ce46152 service nova] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Refreshing instance network info cache due to event network-changed-6248c9c9-4f43-44c4-a25a-63b0c9920e89. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 913.843592] env[61978]: DEBUG oslo_concurrency.lockutils [req-e982790d-5846-40a6-a338-b2d8b8150429 req-bd944b1c-66b8-4c5a-a878-97235ce46152 service nova] Acquiring lock "refresh_cache-ff793464-9bef-449f-8485-36d3b8fb1d69" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 913.843739] env[61978]: DEBUG oslo_concurrency.lockutils [req-e982790d-5846-40a6-a338-b2d8b8150429 req-bd944b1c-66b8-4c5a-a878-97235ce46152 service nova] Acquired lock "refresh_cache-ff793464-9bef-449f-8485-36d3b8fb1d69" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.843901] env[61978]: DEBUG nova.network.neutron [req-e982790d-5846-40a6-a338-b2d8b8150429 req-bd944b1c-66b8-4c5a-a878-97235ce46152 service nova] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Refreshing network info cache for port 6248c9c9-4f43-44c4-a25a-63b0c9920e89 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 913.859731] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1394788, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.969159] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394785, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.061753] env[61978]: DEBUG nova.network.neutron [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Successfully created port: 9b850a07-34a8-4e1d-afff-7650895b0238 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 914.071910] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.078567] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394787, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.109398] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Acquiring lock "a4d45835-f065-445f-bcb6-d1b01d545cb0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.109853] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Lock "a4d45835-f065-445f-bcb6-d1b01d545cb0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.110384] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Acquiring lock "a4d45835-f065-445f-bcb6-d1b01d545cb0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.110721] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Lock "a4d45835-f065-445f-bcb6-d1b01d545cb0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.111727] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Lock "a4d45835-f065-445f-bcb6-d1b01d545cb0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.113675] env[61978]: INFO nova.compute.manager [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Terminating instance [ 914.115979] env[61978]: DEBUG nova.compute.manager [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 914.116348] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 914.117283] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be25c510-6edf-4655-b1ab-b3125f5d2f8f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.126054] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 914.126414] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-45dcf10a-996a-45e7-ab2c-9fd37c2c034d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.134705] env[61978]: DEBUG oslo_vmware.api [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Waiting for the task: (returnval){ [ 914.134705] env[61978]: value = "task-1394789" [ 914.134705] env[61978]: _type = "Task" [ 914.134705] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.135518] env[61978]: DEBUG nova.compute.manager [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 914.159858] env[61978]: DEBUG oslo_vmware.api [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': task-1394789, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.219261] env[61978]: INFO nova.compute.manager [-] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Took 1.42 seconds to deallocate network for instance. [ 914.353825] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1394788, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.470681] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394785, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.577303] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394787, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.598850] env[61978]: DEBUG nova.network.neutron [req-e982790d-5846-40a6-a338-b2d8b8150429 req-bd944b1c-66b8-4c5a-a878-97235ce46152 service nova] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Updated VIF entry in instance network info cache for port 6248c9c9-4f43-44c4-a25a-63b0c9920e89. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 914.598850] env[61978]: DEBUG nova.network.neutron [req-e982790d-5846-40a6-a338-b2d8b8150429 req-bd944b1c-66b8-4c5a-a878-97235ce46152 service nova] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Updating instance_info_cache with network_info: [{"id": "6248c9c9-4f43-44c4-a25a-63b0c9920e89", "address": "fa:16:3e:f1:b2:3a", "network": {"id": "8438071b-a3cd-4e2d-867f-1fa4a67bccca", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2102143534-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e0b4b9239e545cd8bd19bc98e5fc5b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6248c9c9-4f", "ovs_interfaceid": "6248c9c9-4f43-44c4-a25a-63b0c9920e89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.650515] env[61978]: INFO nova.virt.block_device [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Booting with volume 8895fd52-1e47-4be8-bc86-5cb974e51fe1 at /dev/sda [ 914.657184] env[61978]: DEBUG oslo_vmware.api [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': task-1394789, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.692409] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f072c7b-fd5a-4f38-96bb-599525162aac {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.703193] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d209dcf-c56c-4194-bf76-94a924fda776 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.719314] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e069c5a5-4d1e-4159-accf-cb22640fca61 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.731982] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.733192] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-233b7ca1-5454-4e21-9b9c-64b089e10e7a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.749211] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8f311afd-6b5f-4ff0-9017-2e8f257d9c88 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.778102] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dea55c4-0faf-44ef-99ef-ab9493e2c657 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.783265] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a9d148c-3f20-4d0e-9317-5a45c4d22b72 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.799417] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19ab407-c44f-4338-82a7-dbb8fc3294e3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.824620] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b60c65-8b84-4aba-bb31-6abd6458be0d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.827312] env[61978]: DEBUG nova.compute.provider_tree [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 914.834406] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-561cfa81-b8e8-4e18-98c3-578b75133b1d {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.850086] env[61978]: DEBUG nova.virt.block_device [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Updating existing volume attachment record: 2e5b8af4-0ddf-4463-82d3-ad2f0f713d1b {{(pid=61978) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 914.855888] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1394788, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.966962] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394785, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.078405] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394787, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.100276] env[61978]: DEBUG oslo_concurrency.lockutils [req-e982790d-5846-40a6-a338-b2d8b8150429 req-bd944b1c-66b8-4c5a-a878-97235ce46152 service nova] Releasing lock "refresh_cache-ff793464-9bef-449f-8485-36d3b8fb1d69" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.100467] env[61978]: DEBUG nova.compute.manager [req-e982790d-5846-40a6-a338-b2d8b8150429 req-bd944b1c-66b8-4c5a-a878-97235ce46152 service nova] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Received event network-vif-deleted-f98d4797-3791-4132-9a71-1b520fa8e5e8 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 915.100710] env[61978]: DEBUG nova.compute.manager [req-e982790d-5846-40a6-a338-b2d8b8150429 req-bd944b1c-66b8-4c5a-a878-97235ce46152 service nova] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Received event network-vif-deleted-51cc84cf-63e4-4bb7-92e8-7f718bbc8a8f {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 915.152759] env[61978]: DEBUG oslo_vmware.api [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': task-1394789, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.330918] env[61978]: DEBUG nova.scheduler.client.report [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 915.351449] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1394788, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.469573] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394785, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.582804] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394787, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.651138] env[61978]: DEBUG nova.network.neutron [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Successfully updated port: 9b850a07-34a8-4e1d-afff-7650895b0238 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 915.658864] env[61978]: DEBUG oslo_vmware.api [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': task-1394789, 'name': PowerOffVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.837264] env[61978]: DEBUG oslo_concurrency.lockutils [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.716s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.837691] env[61978]: DEBUG nova.compute.manager [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 915.840810] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.390s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.844019] env[61978]: DEBUG nova.objects.instance [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Lazy-loading 'resources' on Instance uuid 081339d7-6d9b-4b66-a816-467d23196c9a {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 915.854137] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1394788, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.874524] env[61978]: DEBUG nova.compute.manager [req-883b3c14-71b9-402e-a95b-52d19e0bede0 req-bd52f2ce-68fd-4b1a-8a25-a33ea9b62ea6 service nova] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Received event network-vif-plugged-9b850a07-34a8-4e1d-afff-7650895b0238 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 915.874743] env[61978]: DEBUG oslo_concurrency.lockutils [req-883b3c14-71b9-402e-a95b-52d19e0bede0 req-bd52f2ce-68fd-4b1a-8a25-a33ea9b62ea6 service nova] Acquiring lock "e30d4a9f-1d75-453c-9552-2a0fbd4aa87d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.875150] env[61978]: DEBUG oslo_concurrency.lockutils [req-883b3c14-71b9-402e-a95b-52d19e0bede0 req-bd52f2ce-68fd-4b1a-8a25-a33ea9b62ea6 service nova] Lock "e30d4a9f-1d75-453c-9552-2a0fbd4aa87d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.875392] env[61978]: DEBUG oslo_concurrency.lockutils [req-883b3c14-71b9-402e-a95b-52d19e0bede0 req-bd52f2ce-68fd-4b1a-8a25-a33ea9b62ea6 service nova] Lock "e30d4a9f-1d75-453c-9552-2a0fbd4aa87d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.875576] env[61978]: DEBUG nova.compute.manager [req-883b3c14-71b9-402e-a95b-52d19e0bede0 req-bd52f2ce-68fd-4b1a-8a25-a33ea9b62ea6 service nova] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] No waiting events found dispatching network-vif-plugged-9b850a07-34a8-4e1d-afff-7650895b0238 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 915.875747] env[61978]: WARNING nova.compute.manager [req-883b3c14-71b9-402e-a95b-52d19e0bede0 req-bd52f2ce-68fd-4b1a-8a25-a33ea9b62ea6 service nova] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Received unexpected event network-vif-plugged-9b850a07-34a8-4e1d-afff-7650895b0238 for instance with vm_state building and task_state block_device_mapping. 
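The "Acquiring lock" / "Lock ... acquired ... waited 23.390s" / "released ... held 2.716s" DEBUG entries above come from oslo.concurrency's lockutils wrapper around nova's resource tracker. The following is a minimal, illustrative sketch of that pattern only; apart from the lockutils calls and the "compute_resources" semaphore name seen in the log, all function names and values are placeholders, not nova's actual implementation.

    # Sketch of the lockutils pattern behind the "Acquiring lock" / "Lock ... acquired"
    # DEBUG lines above. Only the lockutils calls and the semaphore name are from the
    # log; everything else is an illustrative stand-in.
    import time
    from oslo_concurrency import lockutils

    COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def instance_claim(instance_uuid, vcpus, memory_mb):
        # Only one thread per process holds "compute_resources" at a time, which is
        # why the log shows long "waited NN.NNNs" values when many builds contend
        # for the resource tracker.
        time.sleep(0.1)  # stand-in for the real resource bookkeeping
        return {"instance": instance_uuid, "vcpus": vcpus, "memory_mb": memory_mb}

    def update_usage(instance_uuid):
        # Context-manager form serializing on the same semaphore.
        with lockutils.lock(COMPUTE_RESOURCE_SEMAPHORE):
            time.sleep(0.05)  # stand-in for usage recalculation

    if __name__ == "__main__":
        print(instance_claim("32bcb974-8db9-43e2-b397-b497f3a4f30c", 1, 512))
        update_usage("32bcb974-8db9-43e2-b397-b497f3a4f30c")

The long waits on this one semaphore are the main reason claims and usage updates in this log serialize behind each other even though the VMware tasks themselves run concurrently.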
[ 915.875908] env[61978]: DEBUG nova.compute.manager [req-883b3c14-71b9-402e-a95b-52d19e0bede0 req-bd52f2ce-68fd-4b1a-8a25-a33ea9b62ea6 service nova] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Received event network-changed-9b850a07-34a8-4e1d-afff-7650895b0238 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 915.876076] env[61978]: DEBUG nova.compute.manager [req-883b3c14-71b9-402e-a95b-52d19e0bede0 req-bd52f2ce-68fd-4b1a-8a25-a33ea9b62ea6 service nova] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Refreshing instance network info cache due to event network-changed-9b850a07-34a8-4e1d-afff-7650895b0238. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 915.876270] env[61978]: DEBUG oslo_concurrency.lockutils [req-883b3c14-71b9-402e-a95b-52d19e0bede0 req-bd52f2ce-68fd-4b1a-8a25-a33ea9b62ea6 service nova] Acquiring lock "refresh_cache-e30d4a9f-1d75-453c-9552-2a0fbd4aa87d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.876409] env[61978]: DEBUG oslo_concurrency.lockutils [req-883b3c14-71b9-402e-a95b-52d19e0bede0 req-bd52f2ce-68fd-4b1a-8a25-a33ea9b62ea6 service nova] Acquired lock "refresh_cache-e30d4a9f-1d75-453c-9552-2a0fbd4aa87d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.876582] env[61978]: DEBUG nova.network.neutron [req-883b3c14-71b9-402e-a95b-52d19e0bede0 req-bd52f2ce-68fd-4b1a-8a25-a33ea9b62ea6 service nova] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Refreshing network info cache for port 9b850a07-34a8-4e1d-afff-7650895b0238 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 915.978887] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394785, 'name': CreateVM_Task, 'duration_secs': 2.459795} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.979059] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 915.980221] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.980394] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.980713] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 915.981603] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-080650f2-66a0-400c-8152-7adc7ba20276 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.988885] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Waiting for the task: (returnval){ [ 915.988885] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b85dde-d845-301d-949c-99ec3bd01dec" [ 915.988885] env[61978]: _type = "Task" [ 915.988885] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.003926] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b85dde-d845-301d-949c-99ec3bd01dec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.081519] env[61978]: DEBUG oslo_vmware.api [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394787, 'name': PowerOnVM_Task, 'duration_secs': 2.271049} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.081831] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 916.082096] env[61978]: INFO nova.compute.manager [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Took 12.41 seconds to spawn the instance on the hypervisor. [ 916.082351] env[61978]: DEBUG nova.compute.manager [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 916.083154] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad66de3-8732-4586-b8fe-e1c73b2af982 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.116600] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb58de92-2978-4873-ac0c-f746b4e4e74c tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Volume attach. Driver type: vmdk {{(pid=61978) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 916.116886] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb58de92-2978-4873-ac0c-f746b4e4e74c tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295835', 'volume_id': '6c6c1425-42c5-4c67-9006-17d5cd5278f1', 'name': 'volume-6c6c1425-42c5-4c67-9006-17d5cd5278f1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '76dff032-a806-4910-a48b-8850b05131c1', 'attached_at': '', 'detached_at': '', 'volume_id': '6c6c1425-42c5-4c67-9006-17d5cd5278f1', 'serial': '6c6c1425-42c5-4c67-9006-17d5cd5278f1'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 916.118117] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d00218-33fd-421c-86b2-fefd74640aaa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.138704] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5e23452-0ae8-4b0e-8933-632a3cd19b5c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.160378] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Acquiring lock "refresh_cache-e30d4a9f-1d75-453c-9552-2a0fbd4aa87d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} 
[ 916.168452] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb58de92-2978-4873-ac0c-f746b4e4e74c tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] volume-6c6c1425-42c5-4c67-9006-17d5cd5278f1/volume-6c6c1425-42c5-4c67-9006-17d5cd5278f1.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 916.172245] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2ebfa77-b876-4733-849f-0532a7b1a3fa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.192775] env[61978]: DEBUG oslo_vmware.api [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': task-1394789, 'name': PowerOffVM_Task, 'duration_secs': 1.709731} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.194817] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 916.194817] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 916.195068] env[61978]: DEBUG oslo_vmware.api [None req-cb58de92-2978-4873-ac0c-f746b4e4e74c tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 916.195068] env[61978]: value = "task-1394790" [ 916.195068] env[61978]: _type = "Task" [ 916.195068] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.195309] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-63bef3a3-69f1-4239-98c3-e2e1ee40f7ab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.205008] env[61978]: DEBUG oslo_vmware.api [None req-cb58de92-2978-4873-ac0c-f746b4e4e74c tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394790, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.347818] env[61978]: DEBUG nova.compute.utils [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 916.347818] env[61978]: DEBUG nova.compute.manager [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 916.347818] env[61978]: DEBUG nova.network.neutron [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 916.362708] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1394788, 'name': ReconfigVM_Task, 'duration_secs': 2.0444} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.363821] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Reconfigured VM instance instance-00000019 to attach disk [datastore2] c17c986e-c008-4414-8dd1-4ea836458048/c17c986e-c008-4414-8dd1-4ea836458048.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 916.364520] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8836dc31-5f67-4752-8af3-5e3b3e9431ab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.373094] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 916.373094] env[61978]: value = "task-1394792" [ 916.373094] env[61978]: _type = "Task" [ 916.373094] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.386470] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1394792, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.427549] env[61978]: DEBUG nova.network.neutron [req-883b3c14-71b9-402e-a95b-52d19e0bede0 req-bd52f2ce-68fd-4b1a-8a25-a33ea9b62ea6 service nova] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 916.432979] env[61978]: DEBUG nova.policy [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8fea7938a2234dc4bf329bd30475192d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4d5745ee1ed64809a93b1e0b1dea11bf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 916.503740] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b85dde-d845-301d-949c-99ec3bd01dec, 'name': SearchDatastore_Task, 'duration_secs': 0.012815} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.508223] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.508517] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 916.508798] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.508973] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.510739] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 916.510739] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eff3ec18-6983-4ca1-92a5-1f20f7a698a0 
{{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.525580] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 916.525790] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 916.526674] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6587df7d-4835-4b5b-85d8-832b59721174 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.537670] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Waiting for the task: (returnval){ [ 916.537670] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f3c3c6-1697-850d-8cb3-8314188778fa" [ 916.537670] env[61978]: _type = "Task" [ 916.537670] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.548610] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f3c3c6-1697-850d-8cb3-8314188778fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.613508] env[61978]: INFO nova.compute.manager [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Took 42.79 seconds to build instance. [ 916.620056] env[61978]: DEBUG nova.network.neutron [req-883b3c14-71b9-402e-a95b-52d19e0bede0 req-bd52f2ce-68fd-4b1a-8a25-a33ea9b62ea6 service nova] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.719617] env[61978]: DEBUG oslo_vmware.api [None req-cb58de92-2978-4873-ac0c-f746b4e4e74c tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394790, 'name': ReconfigVM_Task, 'duration_secs': 0.43748} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.719705] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb58de92-2978-4873-ac0c-f746b4e4e74c tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Reconfigured VM instance instance-00000016 to attach disk [datastore2] volume-6c6c1425-42c5-4c67-9006-17d5cd5278f1/volume-6c6c1425-42c5-4c67-9006-17d5cd5278f1.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 916.726647] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b733a8a-660e-4f82-bd2b-f2a98baf448f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.749819] env[61978]: DEBUG oslo_vmware.api [None req-cb58de92-2978-4873-ac0c-f746b4e4e74c tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 916.749819] env[61978]: value = "task-1394793" [ 916.749819] env[61978]: _type = "Task" [ 916.749819] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.758507] env[61978]: DEBUG oslo_vmware.api [None req-cb58de92-2978-4873-ac0c-f746b4e4e74c tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394793, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.856480] env[61978]: DEBUG nova.compute.manager [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 916.889299] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1394792, 'name': Rename_Task, 'duration_secs': 0.155583} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.889299] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 916.889299] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed6908eb-9fdf-48a9-9d96-e8d3bdc0658e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.898019] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 916.898019] env[61978]: value = "task-1394794" [ 916.898019] env[61978]: _type = "Task" [ 916.898019] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.915581] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1394794, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.982372] env[61978]: DEBUG nova.compute.manager [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 916.982372] env[61978]: DEBUG nova.virt.hardware [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 916.982575] env[61978]: DEBUG nova.virt.hardware [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 916.982575] env[61978]: DEBUG nova.virt.hardware [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 916.982742] env[61978]: DEBUG nova.virt.hardware [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 916.982885] env[61978]: DEBUG nova.virt.hardware [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 916.983044] env[61978]: DEBUG nova.virt.hardware [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 916.983267] env[61978]: DEBUG nova.virt.hardware [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 916.983425] env[61978]: DEBUG nova.virt.hardware [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 916.983581] env[61978]: DEBUG nova.virt.hardware [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 916.983741] env[61978]: DEBUG nova.virt.hardware [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 916.983912] env[61978]: DEBUG nova.virt.hardware [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 916.989227] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1bb9ff-fdf1-48a1-bde7-0fed573f6818 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.000681] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31de58fc-152a-4521-ba1a-f0a464116fd3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.007256] env[61978]: DEBUG nova.network.neutron [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Successfully created port: a33524bd-7627-49fa-ab70-55b0962b8ca3 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 917.033334] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88086a03-6f70-4ab4-9c9c-7b57e3b2ead3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.046913] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b23b24-a733-45d4-a324-8778164171a8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.056425] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f3c3c6-1697-850d-8cb3-8314188778fa, 'name': SearchDatastore_Task, 'duration_secs': 0.01061} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.057772] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bbb098b-9bc9-4d21-8dd7-8fbd281ca034 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.088426] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1fc68b-93f3-4471-b1c9-954b8d38d984 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.092311] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Waiting for the task: (returnval){ [ 917.092311] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]526155d2-5b3a-7467-15a9-9afda6dee8f1" [ 917.092311] env[61978]: _type = "Task" [ 917.092311] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.099708] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d7d933-6a9a-43ff-924b-d0ba96effa82 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.106847] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]526155d2-5b3a-7467-15a9-9afda6dee8f1, 'name': SearchDatastore_Task, 'duration_secs': 0.009992} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.107454] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.107723] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] ff793464-9bef-449f-8485-36d3b8fb1d69/ff793464-9bef-449f-8485-36d3b8fb1d69.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 917.107972] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33a60a8e-aaee-4c34-a473-342964d33bd5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.117432] env[61978]: DEBUG nova.compute.provider_tree [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.119897] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80a758e8-5793-4590-a7cc-36be1245be0e tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Lock "bb0c149c-920e-47c4-a960-47b2fb443431" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.310s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.124484] env[61978]: DEBUG oslo_concurrency.lockutils [req-883b3c14-71b9-402e-a95b-52d19e0bede0 req-bd52f2ce-68fd-4b1a-8a25-a33ea9b62ea6 service nova] Releasing lock "refresh_cache-e30d4a9f-1d75-453c-9552-2a0fbd4aa87d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.124484] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Acquired lock "refresh_cache-e30d4a9f-1d75-453c-9552-2a0fbd4aa87d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.124484] env[61978]: DEBUG nova.network.neutron [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 917.127954] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Waiting for the task: 
(returnval){ [ 917.127954] env[61978]: value = "task-1394795" [ 917.127954] env[61978]: _type = "Task" [ 917.127954] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.138880] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': task-1394795, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.259978] env[61978]: DEBUG oslo_vmware.api [None req-cb58de92-2978-4873-ac0c-f746b4e4e74c tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394793, 'name': ReconfigVM_Task, 'duration_secs': 0.158342} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.260300] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb58de92-2978-4873-ac0c-f746b4e4e74c tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295835', 'volume_id': '6c6c1425-42c5-4c67-9006-17d5cd5278f1', 'name': 'volume-6c6c1425-42c5-4c67-9006-17d5cd5278f1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '76dff032-a806-4910-a48b-8850b05131c1', 'attached_at': '', 'detached_at': '', 'volume_id': '6c6c1425-42c5-4c67-9006-17d5cd5278f1', 'serial': '6c6c1425-42c5-4c67-9006-17d5cd5278f1'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 917.417284] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1394794, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.620987] env[61978]: DEBUG nova.scheduler.client.report [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 917.624782] env[61978]: DEBUG nova.compute.manager [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 917.644039] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': task-1394795, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.646837] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 917.647091] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 917.647283] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Deleting the datastore file [datastore1] a4d45835-f065-445f-bcb6-d1b01d545cb0 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 917.647568] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85787904-ecdc-4788-9c95-c759f2138b1a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.655464] env[61978]: DEBUG oslo_vmware.api [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Waiting for the task: (returnval){ [ 917.655464] env[61978]: value = "task-1394796" [ 917.655464] env[61978]: _type = "Task" [ 917.655464] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.670386] env[61978]: DEBUG oslo_vmware.api [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': task-1394796, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.694038] env[61978]: DEBUG nova.network.neutron [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 917.865920] env[61978]: DEBUG nova.compute.manager [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 917.897124] env[61978]: DEBUG nova.virt.hardware [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 917.897124] env[61978]: DEBUG nova.virt.hardware [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 917.897124] env[61978]: DEBUG nova.virt.hardware [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 917.897277] env[61978]: DEBUG nova.virt.hardware [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 917.897458] env[61978]: DEBUG nova.virt.hardware [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 917.897499] env[61978]: DEBUG nova.virt.hardware [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 917.897690] env[61978]: DEBUG nova.virt.hardware [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 917.898277] env[61978]: DEBUG nova.virt.hardware [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 917.898277] env[61978]: DEBUG nova.virt.hardware [None 
req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 917.898444] env[61978]: DEBUG nova.virt.hardware [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 917.898895] env[61978]: DEBUG nova.virt.hardware [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 917.903022] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e793838-0f6c-45f1-919b-9bb7de625943 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.917688] env[61978]: DEBUG oslo_vmware.api [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1394794, 'name': PowerOnVM_Task, 'duration_secs': 0.546146} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.918967] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7e9934-a8aa-4c7d-89aa-393560891559 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.923569] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 917.923829] env[61978]: INFO nova.compute.manager [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Took 10.53 seconds to spawn the instance on the hypervisor. 
[ 917.924026] env[61978]: DEBUG nova.compute.manager [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 917.925255] env[61978]: DEBUG nova.network.neutron [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Updating instance_info_cache with network_info: [{"id": "9b850a07-34a8-4e1d-afff-7650895b0238", "address": "fa:16:3e:a8:d3:69", "network": {"id": "48011c8f-c34e-428d-a391-5540b4d1900a", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-115035202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eece5991af2241b5a2e30c69894be228", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b850a07-34", "ovs_interfaceid": "9b850a07-34a8-4e1d-afff-7650895b0238", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.927818] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed2f4955-1cde-4228-8853-9bf29ba5d96a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.132315] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.291s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.137157] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.045s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.139439] env[61978]: INFO nova.compute.claims [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 918.157275] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 
tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': task-1394795, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.549014} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.161197] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] ff793464-9bef-449f-8485-36d3b8fb1d69/ff793464-9bef-449f-8485-36d3b8fb1d69.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 918.161437] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 918.161684] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-90accf2e-7074-4124-b301-ab324b771707 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.171073] env[61978]: DEBUG oslo_vmware.api [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Task: {'id': task-1394796, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155899} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.173105] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 918.173316] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 918.173515] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 918.173695] env[61978]: INFO nova.compute.manager [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Took 4.06 seconds to destroy the instance on the hypervisor. 
[ 918.173942] env[61978]: DEBUG oslo.service.loopingcall [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 918.174577] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Waiting for the task: (returnval){ [ 918.174577] env[61978]: value = "task-1394797" [ 918.174577] env[61978]: _type = "Task" [ 918.174577] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.174743] env[61978]: DEBUG nova.compute.manager [-] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 918.174870] env[61978]: DEBUG nova.network.neutron [-] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 918.183081] env[61978]: INFO nova.scheduler.client.report [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Deleted allocations for instance 081339d7-6d9b-4b66-a816-467d23196c9a [ 918.199456] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': task-1394797, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.199456] env[61978]: DEBUG oslo_concurrency.lockutils [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.304010] env[61978]: DEBUG nova.objects.instance [None req-cb58de92-2978-4873-ac0c-f746b4e4e74c tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lazy-loading 'flavor' on Instance uuid 76dff032-a806-4910-a48b-8850b05131c1 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 918.428973] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Releasing lock "refresh_cache-e30d4a9f-1d75-453c-9552-2a0fbd4aa87d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.429419] env[61978]: DEBUG nova.compute.manager [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Instance network_info: |[{"id": "9b850a07-34a8-4e1d-afff-7650895b0238", "address": "fa:16:3e:a8:d3:69", "network": {"id": "48011c8f-c34e-428d-a391-5540b4d1900a", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-115035202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eece5991af2241b5a2e30c69894be228", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b850a07-34", "ovs_interfaceid": "9b850a07-34a8-4e1d-afff-7650895b0238", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 918.430195] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:d3:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5c8dbe25-bca7-4d91-b577-193b8b2aad8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9b850a07-34a8-4e1d-afff-7650895b0238', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 918.439610] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Creating folder: Project (eece5991af2241b5a2e30c69894be228). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 918.445735] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d9bfc20-fe49-4dad-ba70-cc1b02bbc118 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.451307] env[61978]: INFO nova.compute.manager [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Took 38.08 seconds to build instance. [ 918.464777] env[61978]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 918.464777] env[61978]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=61978) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 918.465700] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Folder already exists: Project (eece5991af2241b5a2e30c69894be228). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 918.465700] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Creating folder: Instances. Parent ref: group-v295806. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 918.465700] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-121a1a92-7abd-4c65-ad28-02ecff8ee0f7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.477761] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Created folder: Instances in parent group-v295806. [ 918.478036] env[61978]: DEBUG oslo.service.loopingcall [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 918.478236] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 918.478451] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-af6758a5-dece-4801-8f00-f66e39f059aa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.503673] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 918.503673] env[61978]: value = "task-1394800" [ 918.503673] env[61978]: _type = "Task" [ 918.503673] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.515561] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394800, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.692607] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': task-1394797, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.172711} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.692607] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 918.692607] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb32923-5ca3-4e79-8a90-d0b56282a5d0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.714904] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0caf5f75-fd3c-468d-8553-b19029046675 tempest-TenantUsagesTestJSON-1860624541 tempest-TenantUsagesTestJSON-1860624541-project-member] Lock "081339d7-6d9b-4b66-a816-467d23196c9a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.621s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.725117] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Reconfiguring VM instance instance-0000001a to attach disk [datastore2] ff793464-9bef-449f-8485-36d3b8fb1d69/ff793464-9bef-449f-8485-36d3b8fb1d69.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 918.727307] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e22446a-4950-4d20-ab9d-3a7f6d2e7080 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.747972] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.748565] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.754323] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Waiting for the task: (returnval){ [ 918.754323] env[61978]: value = "task-1394801" [ 918.754323] env[61978]: _type = "Task" [ 918.754323] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.768202] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': task-1394801, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.810672] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb58de92-2978-4873-ac0c-f746b4e4e74c tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "76dff032-a806-4910-a48b-8850b05131c1" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.892s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.953144] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2a55e5c0-a9c2-461e-93c5-fc5a95b0cf0c tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "c17c986e-c008-4414-8dd1-4ea836458048" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.601s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.973351] env[61978]: DEBUG nova.network.neutron [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Successfully updated port: a33524bd-7627-49fa-ab70-55b0962b8ca3 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 919.019272] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394800, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.038858] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Acquiring lock "bb0c149c-920e-47c4-a960-47b2fb443431" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.038858] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Lock "bb0c149c-920e-47c4-a960-47b2fb443431" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.039110] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Acquiring lock "bb0c149c-920e-47c4-a960-47b2fb443431-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.039328] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Lock "bb0c149c-920e-47c4-a960-47b2fb443431-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.039500] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Lock "bb0c149c-920e-47c4-a960-47b2fb443431-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.045261] env[61978]: INFO nova.compute.manager [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Terminating instance [ 919.050050] env[61978]: DEBUG nova.compute.manager [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 919.050294] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 919.051171] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-210ce785-82fd-4574-a73a-33a2e0b0e9da {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.060030] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 919.060030] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dff13fb1-90f1-4876-a34c-7aae12c33569 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.067923] env[61978]: DEBUG oslo_vmware.api [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Waiting for the task: (returnval){ [ 919.067923] env[61978]: value = "task-1394802" [ 919.067923] env[61978]: _type = "Task" [ 919.067923] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.078880] env[61978]: DEBUG oslo_vmware.api [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394802, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.229899] env[61978]: DEBUG nova.network.neutron [-] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.278015] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': task-1394801, 'name': ReconfigVM_Task, 'duration_secs': 0.505617} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.278131] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Reconfigured VM instance instance-0000001a to attach disk [datastore2] ff793464-9bef-449f-8485-36d3b8fb1d69/ff793464-9bef-449f-8485-36d3b8fb1d69.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 919.279637] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b84f8168-d698-4a93-8adc-93cb11be4f87 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.290430] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Waiting for the task: (returnval){ [ 919.290430] env[61978]: value = "task-1394803" [ 919.290430] env[61978]: _type = "Task" [ 919.290430] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.302556] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': task-1394803, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.375035] env[61978]: DEBUG nova.compute.manager [req-f8aff6bf-f7a2-4f41-920d-78f735b14ad3 req-4c723e40-9a0e-430c-9614-3d4c4a2c67b3 service nova] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Received event network-vif-deleted-2a7c8ac4-8bf9-436b-97e6-d95c0b1486ee {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 919.458672] env[61978]: DEBUG nova.compute.manager [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 919.474989] env[61978]: DEBUG oslo_concurrency.lockutils [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Acquiring lock "refresh_cache-32bcb974-8db9-43e2-b397-b497f3a4f30c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.475217] env[61978]: DEBUG oslo_concurrency.lockutils [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Acquired lock "refresh_cache-32bcb974-8db9-43e2-b397-b497f3a4f30c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.476260] env[61978]: DEBUG nova.network.neutron [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 919.518738] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394800, 'name': CreateVM_Task, 'duration_secs': 0.582358} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.521427] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 919.522936] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sda', 'disk_bus': None, 'device_type': None, 'attachment_id': '2e5b8af4-0ddf-4463-82d3-ad2f0f713d1b', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295814', 'volume_id': '8895fd52-1e47-4be8-bc86-5cb974e51fe1', 'name': 'volume-8895fd52-1e47-4be8-bc86-5cb974e51fe1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e30d4a9f-1d75-453c-9552-2a0fbd4aa87d', 'attached_at': '', 'detached_at': '', 'volume_id': '8895fd52-1e47-4be8-bc86-5cb974e51fe1', 'serial': '8895fd52-1e47-4be8-bc86-5cb974e51fe1'}, 'boot_index': 0, 'guest_format': None, 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=61978) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 919.523159] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Root volume attach. 
Driver type: vmdk {{(pid=61978) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 919.524260] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-894280c1-2b29-4502-a665-ef05c5b94311 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.540339] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7609bc86-3985-492e-adbb-2a0ebbfb9567 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.550507] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de3f5fd-b70e-4c5b-a4cc-273468edde96 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.565092] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-4a00068f-0a8d-4a00-8262-8260b7addb04 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.578714] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Waiting for the task: (returnval){ [ 919.578714] env[61978]: value = "task-1394804" [ 919.578714] env[61978]: _type = "Task" [ 919.578714] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.582429] env[61978]: DEBUG oslo_vmware.api [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394802, 'name': PowerOffVM_Task, 'duration_secs': 0.259208} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.585712] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 919.587113] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 919.587113] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09f989fc-c34b-4770-90be-882e99d11bd8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.596815] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394804, 'name': RelocateVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.663203] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 919.665869] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 919.665869] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Deleting the datastore file [datastore2] bb0c149c-920e-47c4-a960-47b2fb443431 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 919.665869] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17c83bd4-dfb1-4240-9914-a4db7c4640e2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.678189] env[61978]: DEBUG oslo_vmware.api [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Waiting for the task: (returnval){ [ 919.678189] env[61978]: value = "task-1394806" [ 919.678189] env[61978]: _type = "Task" [ 919.678189] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.695994] env[61978]: DEBUG oslo_vmware.api [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394806, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.735441] env[61978]: INFO nova.compute.manager [-] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Took 1.56 seconds to deallocate network for instance. 
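Annotation: earlier in this stretch (918.464777) Folder.CreateFolder came back with a DuplicateName SOAP fault — reported by suds with HTTP 200, hence the WARNING — and vm_util simply reused the existing "Project (eece5991…)" folder before creating "Instances" under group-v295806. A minimal sketch of that idempotent-create pattern, assuming oslo_vmware's exceptions module exposes the DuplicateName fault class; the helper name is illustrative and the re-lookup of the existing folder ref is elided.

    from oslo_vmware import exceptions as vexc

    def ensure_folder(session, parent_ref, name):
        """Create a vCenter folder under parent_ref, tolerating races.

        Returns the new folder ref, or None when the folder already existed
        (a real caller would then look the existing ref up, elided here).
        """
        try:
            return session.invoke_api(session.vim, 'CreateFolder',
                                      parent_ref, name=name)
        except vexc.DuplicateName:
            # Another worker created the folder first -- the DuplicateName
            # fault / "Folder already exists" pair seen in the log.
            return None

Treating the fault as success is what lets concurrent spawns in the same tenant race on the project folder without failing the build.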
[ 919.748033] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "bdfdd685-e440-4f53-b6c4-2ee2f06acba8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.748033] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "bdfdd685-e440-4f53-b6c4-2ee2f06acba8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.803973] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': task-1394803, 'name': Rename_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.862972] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad928786-0194-484f-b5da-02a35d8a912b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.875908] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a321c6d5-87d8-47dd-8cb7-ab6e882fdde4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.915237] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6027e83c-467c-486e-bfa2-5fcac36ca57d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.929301] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb66ffd-3794-472a-bbad-71560ccfaa47 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.950889] env[61978]: DEBUG nova.compute.provider_tree [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 919.993188] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.033357] env[61978]: DEBUG oslo_concurrency.lockutils [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "76dff032-a806-4910-a48b-8850b05131c1" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.034608] env[61978]: DEBUG oslo_concurrency.lockutils [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "76dff032-a806-4910-a48b-8850b05131c1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.034608] env[61978]: DEBUG oslo_concurrency.lockutils [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "76dff032-a806-4910-a48b-8850b05131c1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.034608] env[61978]: DEBUG oslo_concurrency.lockutils [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "76dff032-a806-4910-a48b-8850b05131c1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.034608] env[61978]: DEBUG oslo_concurrency.lockutils [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "76dff032-a806-4910-a48b-8850b05131c1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.037163] env[61978]: INFO nova.compute.manager [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Terminating instance [ 920.041548] env[61978]: DEBUG nova.compute.manager [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 920.042024] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 920.042302] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a69edcf7-aea8-497f-b430-09488d1fe430 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.052032] env[61978]: DEBUG nova.network.neutron [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 920.055330] env[61978]: DEBUG oslo_vmware.api [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 920.055330] env[61978]: value = "task-1394807" [ 920.055330] env[61978]: _type = "Task" [ 920.055330] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.070601] env[61978]: DEBUG oslo_vmware.api [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394807, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.100230] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394804, 'name': RelocateVM_Task} progress is 38%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.194336] env[61978]: DEBUG oslo_vmware.api [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Task: {'id': task-1394806, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156944} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.194336] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 920.194336] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 920.194336] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 920.194336] env[61978]: INFO nova.compute.manager [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Took 1.14 seconds to destroy the instance on the hypervisor. [ 920.194657] env[61978]: DEBUG oslo.service.loopingcall [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 920.194657] env[61978]: DEBUG nova.compute.manager [-] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 920.194657] env[61978]: DEBUG nova.network.neutron [-] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 920.253181] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.307169] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': task-1394803, 'name': Rename_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.315030] env[61978]: DEBUG nova.network.neutron [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Updating instance_info_cache with network_info: [{"id": "a33524bd-7627-49fa-ab70-55b0962b8ca3", "address": "fa:16:3e:17:23:76", "network": {"id": "263e1420-6a47-473d-a527-fa8b27d6c538", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-912409184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d5745ee1ed64809a93b1e0b1dea11bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16e15a36-a55b-4c27-b864-f284339009d0", "external-id": "nsx-vlan-transportzone-616", "segmentation_id": 616, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa33524bd-76", "ovs_interfaceid": "a33524bd-7627-49fa-ab70-55b0962b8ca3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.455669] env[61978]: DEBUG nova.scheduler.client.report [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 920.570498] env[61978]: DEBUG oslo_vmware.api [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394807, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.600970] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394804, 'name': RelocateVM_Task} progress is 51%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.621226] env[61978]: DEBUG nova.compute.manager [req-710efb3c-a62a-4216-a6c1-bdff41778d57 req-f8691abd-4111-4476-9392-c666485ac7b2 service nova] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Received event network-changed-394a8251-684b-4ddc-ae5c-7ef7ec06b503 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 920.621226] env[61978]: DEBUG nova.compute.manager [req-710efb3c-a62a-4216-a6c1-bdff41778d57 req-f8691abd-4111-4476-9392-c666485ac7b2 service nova] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Refreshing instance network info cache due to event network-changed-394a8251-684b-4ddc-ae5c-7ef7ec06b503. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 920.621226] env[61978]: DEBUG oslo_concurrency.lockutils [req-710efb3c-a62a-4216-a6c1-bdff41778d57 req-f8691abd-4111-4476-9392-c666485ac7b2 service nova] Acquiring lock "refresh_cache-c17c986e-c008-4414-8dd1-4ea836458048" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 920.621226] env[61978]: DEBUG oslo_concurrency.lockutils [req-710efb3c-a62a-4216-a6c1-bdff41778d57 req-f8691abd-4111-4476-9392-c666485ac7b2 service nova] Acquired lock "refresh_cache-c17c986e-c008-4414-8dd1-4ea836458048" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.621226] env[61978]: DEBUG nova.network.neutron [req-710efb3c-a62a-4216-a6c1-bdff41778d57 req-f8691abd-4111-4476-9392-c666485ac7b2 service nova] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Refreshing network info cache for port 394a8251-684b-4ddc-ae5c-7ef7ec06b503 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 920.813172] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': task-1394803, 'name': Rename_Task, 'duration_secs': 1.430181} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.813580] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 920.813922] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a477abd9-d003-490a-b1e6-edd362971245 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.817636] env[61978]: DEBUG oslo_concurrency.lockutils [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Releasing lock "refresh_cache-32bcb974-8db9-43e2-b397-b497f3a4f30c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 920.818011] env[61978]: DEBUG nova.compute.manager [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Instance network_info: |[{"id": "a33524bd-7627-49fa-ab70-55b0962b8ca3", "address": "fa:16:3e:17:23:76", "network": {"id": "263e1420-6a47-473d-a527-fa8b27d6c538", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-912409184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d5745ee1ed64809a93b1e0b1dea11bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16e15a36-a55b-4c27-b864-f284339009d0", "external-id": "nsx-vlan-transportzone-616", "segmentation_id": 616, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa33524bd-76", "ovs_interfaceid": "a33524bd-7627-49fa-ab70-55b0962b8ca3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 920.819716] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:23:76', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16e15a36-a55b-4c27-b864-f284339009d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a33524bd-7627-49fa-ab70-55b0962b8ca3', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 920.829411] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Creating folder: Project 
(4d5745ee1ed64809a93b1e0b1dea11bf). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 920.831211] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b234aaf-3afd-40c4-b9d5-063c0d53ba52 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.833252] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Waiting for the task: (returnval){ [ 920.833252] env[61978]: value = "task-1394808" [ 920.833252] env[61978]: _type = "Task" [ 920.833252] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.843894] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': task-1394808, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.849845] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Created folder: Project (4d5745ee1ed64809a93b1e0b1dea11bf) in parent group-v295764. [ 920.849845] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Creating folder: Instances. Parent ref: group-v295841. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 920.850101] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-858c3d10-13ea-46d8-bfc5-39dbda385a9f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.867810] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Created folder: Instances in parent group-v295841. [ 920.868477] env[61978]: DEBUG oslo.service.loopingcall [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 920.868765] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 920.869235] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa119837-3130-4c0c-828c-69fe74aeafbf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.906178] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 920.906178] env[61978]: value = "task-1394811" [ 920.906178] env[61978]: _type = "Task" [ 920.906178] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.926224] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394811, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.965853] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.829s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.966416] env[61978]: DEBUG nova.compute.manager [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 920.970408] env[61978]: DEBUG oslo_concurrency.lockutils [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.031s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.970534] env[61978]: DEBUG nova.objects.instance [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lazy-loading 'resources' on Instance uuid a0762952-2afd-448a-8e46-ba788a4ca131 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 921.065488] env[61978]: DEBUG nova.network.neutron [-] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.072719] env[61978]: DEBUG oslo_vmware.api [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394807, 'name': PowerOffVM_Task, 'duration_secs': 0.881214} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.073116] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 921.073336] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Volume detach. 
Driver type: vmdk {{(pid=61978) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 921.073590] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295835', 'volume_id': '6c6c1425-42c5-4c67-9006-17d5cd5278f1', 'name': 'volume-6c6c1425-42c5-4c67-9006-17d5cd5278f1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '76dff032-a806-4910-a48b-8850b05131c1', 'attached_at': '', 'detached_at': '', 'volume_id': '6c6c1425-42c5-4c67-9006-17d5cd5278f1', 'serial': '6c6c1425-42c5-4c67-9006-17d5cd5278f1'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 921.074872] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669b9de7-28f5-40c0-95c9-3db130f70081 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.117483] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4479754-ee0f-4a3b-9880-d65447c0b2b1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.132434] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b7889e-14aa-49db-9688-96052a5cff89 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.136621] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394804, 'name': RelocateVM_Task} progress is 65%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.168742] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a4287cb-5326-49ad-bd37-14641bcd8b45 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.190527] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] The volume has not been displaced from its original location: [datastore2] volume-6c6c1425-42c5-4c67-9006-17d5cd5278f1/volume-6c6c1425-42c5-4c67-9006-17d5cd5278f1.vmdk. No consolidation needed. 
{{(pid=61978) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 921.195945] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Reconfiguring VM instance instance-00000016 to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 921.202168] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-757b240d-60e8-4c54-8dd8-e8c19a30a031 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.231957] env[61978]: DEBUG oslo_vmware.api [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 921.231957] env[61978]: value = "task-1394812" [ 921.231957] env[61978]: _type = "Task" [ 921.231957] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.247773] env[61978]: DEBUG oslo_vmware.api [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394812, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.358328] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': task-1394808, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.422634] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394811, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.478043] env[61978]: DEBUG nova.compute.utils [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 921.482391] env[61978]: DEBUG nova.compute.manager [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 921.482391] env[61978]: DEBUG nova.network.neutron [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 921.554724] env[61978]: DEBUG nova.network.neutron [req-710efb3c-a62a-4216-a6c1-bdff41778d57 req-f8691abd-4111-4476-9392-c666485ac7b2 service nova] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Updated VIF entry in instance network info cache for port 394a8251-684b-4ddc-ae5c-7ef7ec06b503. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 921.555895] env[61978]: DEBUG nova.network.neutron [req-710efb3c-a62a-4216-a6c1-bdff41778d57 req-f8691abd-4111-4476-9392-c666485ac7b2 service nova] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Updating instance_info_cache with network_info: [{"id": "394a8251-684b-4ddc-ae5c-7ef7ec06b503", "address": "fa:16:3e:5c:ce:8c", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap394a8251-68", "ovs_interfaceid": "394a8251-684b-4ddc-ae5c-7ef7ec06b503", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.559761] env[61978]: DEBUG nova.policy [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '56a2daa2961146cfaffd864fdd5380d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ff197aac855441de95476c15480603cf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 921.567412] env[61978]: INFO nova.compute.manager [-] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Took 1.37 seconds to deallocate network for instance. 
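The recurring "Waiting for the task: ... progress is N%" entries in this section come from oslo_vmware's task polling (wait_for_task at api.py:397 and _poll_task at api.py:434/444). A minimal sketch of that polling pattern, assuming a hypothetical get_task_info callable and simplified task states (an illustration of the loop visible in the log, not the actual oslo_vmware.api implementation):

import time

def wait_for_task(get_task_info, task_id, poll_interval=0.5):
    # Poll a vCenter-style task until it reaches a terminal state,
    # logging progress along the way (mirrors the DEBUG lines above).
    while True:
        info = get_task_info(task_id)  # hypothetical, e.g. {'state': 'running', 'progress': 65}
        if info['state'] == 'success':
            print(f"Task: {task_id} completed successfully.")
            return info
        if info['state'] == 'error':
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        print(f"Task: {task_id} progress is {info.get('progress', 0)}%.")
        time.sleep(poll_interval)

In the entries above and below, the same loop is what advances PowerOnVM_Task, CreateVM_Task, ReconfigVM_Task and RelocateVM_Task from 0% through completion before the next step of each workflow proceeds.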
[ 921.628109] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394804, 'name': RelocateVM_Task} progress is 78%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.692801] env[61978]: DEBUG nova.compute.manager [req-35ceae9e-b5c7-46c8-bf4c-cb250665d4d3 req-662d0ae0-2554-410b-8ea3-f824e5a75357 service nova] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Received event network-vif-plugged-a33524bd-7627-49fa-ab70-55b0962b8ca3 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 921.692875] env[61978]: DEBUG oslo_concurrency.lockutils [req-35ceae9e-b5c7-46c8-bf4c-cb250665d4d3 req-662d0ae0-2554-410b-8ea3-f824e5a75357 service nova] Acquiring lock "32bcb974-8db9-43e2-b397-b497f3a4f30c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.693147] env[61978]: DEBUG oslo_concurrency.lockutils [req-35ceae9e-b5c7-46c8-bf4c-cb250665d4d3 req-662d0ae0-2554-410b-8ea3-f824e5a75357 service nova] Lock "32bcb974-8db9-43e2-b397-b497f3a4f30c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.693527] env[61978]: DEBUG oslo_concurrency.lockutils [req-35ceae9e-b5c7-46c8-bf4c-cb250665d4d3 req-662d0ae0-2554-410b-8ea3-f824e5a75357 service nova] Lock "32bcb974-8db9-43e2-b397-b497f3a4f30c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.693810] env[61978]: DEBUG nova.compute.manager [req-35ceae9e-b5c7-46c8-bf4c-cb250665d4d3 req-662d0ae0-2554-410b-8ea3-f824e5a75357 service nova] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] No waiting events found dispatching network-vif-plugged-a33524bd-7627-49fa-ab70-55b0962b8ca3 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 921.693900] env[61978]: WARNING nova.compute.manager [req-35ceae9e-b5c7-46c8-bf4c-cb250665d4d3 req-662d0ae0-2554-410b-8ea3-f824e5a75357 service nova] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Received unexpected event network-vif-plugged-a33524bd-7627-49fa-ab70-55b0962b8ca3 for instance with vm_state building and task_state spawning. [ 921.694072] env[61978]: DEBUG nova.compute.manager [req-35ceae9e-b5c7-46c8-bf4c-cb250665d4d3 req-662d0ae0-2554-410b-8ea3-f824e5a75357 service nova] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Received event network-changed-a33524bd-7627-49fa-ab70-55b0962b8ca3 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 921.694303] env[61978]: DEBUG nova.compute.manager [req-35ceae9e-b5c7-46c8-bf4c-cb250665d4d3 req-662d0ae0-2554-410b-8ea3-f824e5a75357 service nova] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Refreshing instance network info cache due to event network-changed-a33524bd-7627-49fa-ab70-55b0962b8ca3. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 921.694636] env[61978]: DEBUG oslo_concurrency.lockutils [req-35ceae9e-b5c7-46c8-bf4c-cb250665d4d3 req-662d0ae0-2554-410b-8ea3-f824e5a75357 service nova] Acquiring lock "refresh_cache-32bcb974-8db9-43e2-b397-b497f3a4f30c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.694848] env[61978]: DEBUG oslo_concurrency.lockutils [req-35ceae9e-b5c7-46c8-bf4c-cb250665d4d3 req-662d0ae0-2554-410b-8ea3-f824e5a75357 service nova] Acquired lock "refresh_cache-32bcb974-8db9-43e2-b397-b497f3a4f30c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.694962] env[61978]: DEBUG nova.network.neutron [req-35ceae9e-b5c7-46c8-bf4c-cb250665d4d3 req-662d0ae0-2554-410b-8ea3-f824e5a75357 service nova] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Refreshing network info cache for port a33524bd-7627-49fa-ab70-55b0962b8ca3 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 921.753754] env[61978]: DEBUG oslo_vmware.api [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394812, 'name': ReconfigVM_Task, 'duration_secs': 0.346238} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.754360] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Reconfigured VM instance instance-00000016 to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 921.762743] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6df7bb0-7f55-4a88-8202-53e6f17de1ae {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.790215] env[61978]: DEBUG oslo_vmware.api [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 921.790215] env[61978]: value = "task-1394813" [ 921.790215] env[61978]: _type = "Task" [ 921.790215] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.805350] env[61978]: DEBUG oslo_vmware.api [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394813, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.855608] env[61978]: DEBUG oslo_vmware.api [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': task-1394808, 'name': PowerOnVM_Task, 'duration_secs': 0.568214} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.860605] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 921.860875] env[61978]: INFO nova.compute.manager [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Took 11.81 seconds to spawn the instance on the hypervisor. [ 921.861100] env[61978]: DEBUG nova.compute.manager [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 921.862410] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19e8c6e-9641-4a42-acfc-2a1aa1ec16ba {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.928148] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394811, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.981473] env[61978]: DEBUG nova.compute.manager [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 921.997867] env[61978]: DEBUG nova.network.neutron [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Successfully created port: cff12603-dd53-4dec-ad6d-70278b3ac575 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 922.057993] env[61978]: DEBUG oslo_concurrency.lockutils [req-710efb3c-a62a-4216-a6c1-bdff41778d57 req-f8691abd-4111-4476-9392-c666485ac7b2 service nova] Releasing lock "refresh_cache-c17c986e-c008-4414-8dd1-4ea836458048" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.081402] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.128941] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394804, 'name': RelocateVM_Task} progress is 92%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.158274] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9576bcab-9a4d-4c0a-ac34-6acdef8de404 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.172230] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb7bf49-a341-42e0-ae83-38d17a2aab35 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.209564] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a289515-c005-4779-8d9c-91247c6db7d3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.219103] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c7abb88-77f1-422f-add4-35a290b0e9a1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.236830] env[61978]: DEBUG nova.compute.provider_tree [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.305341] env[61978]: DEBUG oslo_vmware.api [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394813, 'name': ReconfigVM_Task, 'duration_secs': 0.301152} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.305702] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295835', 'volume_id': '6c6c1425-42c5-4c67-9006-17d5cd5278f1', 'name': 'volume-6c6c1425-42c5-4c67-9006-17d5cd5278f1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '76dff032-a806-4910-a48b-8850b05131c1', 'attached_at': '', 'detached_at': '', 'volume_id': '6c6c1425-42c5-4c67-9006-17d5cd5278f1', 'serial': '6c6c1425-42c5-4c67-9006-17d5cd5278f1'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 922.306037] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 922.306863] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b967f834-6043-426e-adbe-f5c3917b965b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.316735] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 922.317111] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4480fa4a-62b8-4386-955f-f373088a570c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.405597] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 922.405887] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 922.406167] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Deleting the datastore file [datastore2] 76dff032-a806-4910-a48b-8850b05131c1 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 922.406810] env[61978]: INFO nova.compute.manager [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Took 42.01 
seconds to build instance. [ 922.408904] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-90dc4ac9-6baa-4c02-8057-0a3046e8a70a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.424082] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394811, 'name': CreateVM_Task, 'duration_secs': 1.421663} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.425390] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 922.425809] env[61978]: DEBUG oslo_vmware.api [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 922.425809] env[61978]: value = "task-1394815" [ 922.425809] env[61978]: _type = "Task" [ 922.425809] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.426569] env[61978]: DEBUG oslo_concurrency.lockutils [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.426781] env[61978]: DEBUG oslo_concurrency.lockutils [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.427617] env[61978]: DEBUG oslo_concurrency.lockutils [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 922.428071] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0418dfd2-a716-44f1-b957-05b63aa65162 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.438229] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Waiting for the task: (returnval){ [ 922.438229] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ce7062-40b8-c2c3-a2e6-48e0a2773d03" [ 922.438229] env[61978]: _type = "Task" [ 922.438229] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.442138] env[61978]: DEBUG oslo_vmware.api [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394815, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.455412] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ce7062-40b8-c2c3-a2e6-48e0a2773d03, 'name': SearchDatastore_Task, 'duration_secs': 0.011592} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.455683] env[61978]: DEBUG oslo_concurrency.lockutils [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.455919] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 922.456174] env[61978]: DEBUG oslo_concurrency.lockutils [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.456319] env[61978]: DEBUG oslo_concurrency.lockutils [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.456495] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 922.459074] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-659cef6b-efe5-4def-b894-f98ae63b2c5d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.468603] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 922.468603] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 922.468603] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d778cbd5-4375-41bc-b069-9fde7c446278 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.472957] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Waiting for the task: (returnval){ [ 922.472957] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]525f50dd-8229-6214-bfe8-e6db267d256f" [ 922.472957] env[61978]: _type = "Task" [ 922.472957] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.484078] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]525f50dd-8229-6214-bfe8-e6db267d256f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.613607] env[61978]: DEBUG nova.network.neutron [req-35ceae9e-b5c7-46c8-bf4c-cb250665d4d3 req-662d0ae0-2554-410b-8ea3-f824e5a75357 service nova] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Updated VIF entry in instance network info cache for port a33524bd-7627-49fa-ab70-55b0962b8ca3. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 922.614241] env[61978]: DEBUG nova.network.neutron [req-35ceae9e-b5c7-46c8-bf4c-cb250665d4d3 req-662d0ae0-2554-410b-8ea3-f824e5a75357 service nova] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Updating instance_info_cache with network_info: [{"id": "a33524bd-7627-49fa-ab70-55b0962b8ca3", "address": "fa:16:3e:17:23:76", "network": {"id": "263e1420-6a47-473d-a527-fa8b27d6c538", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-912409184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d5745ee1ed64809a93b1e0b1dea11bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16e15a36-a55b-4c27-b864-f284339009d0", "external-id": "nsx-vlan-transportzone-616", "segmentation_id": 616, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa33524bd-76", "ovs_interfaceid": "a33524bd-7627-49fa-ab70-55b0962b8ca3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.631852] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394804, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.741278] env[61978]: DEBUG nova.scheduler.client.report [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 922.913310] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d79d72e8-d0ac-4aec-8979-d3b4c7fa0218 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Lock "ff793464-9bef-449f-8485-36d3b8fb1d69" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.542s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.937841] env[61978]: DEBUG oslo_vmware.api [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1394815, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161019} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.938061] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 922.938245] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 922.938483] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 922.938589] env[61978]: INFO nova.compute.manager [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Took 2.90 seconds to destroy the instance on the hypervisor. [ 922.938815] env[61978]: DEBUG oslo.service.loopingcall [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 922.938996] env[61978]: DEBUG nova.compute.manager [-] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 922.939107] env[61978]: DEBUG nova.network.neutron [-] [instance: 76dff032-a806-4910-a48b-8850b05131c1] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 922.983582] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]525f50dd-8229-6214-bfe8-e6db267d256f, 'name': SearchDatastore_Task, 'duration_secs': 0.010541} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.984509] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21e25289-7949-4d46-a4f4-4829bcbb7417 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.990900] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Waiting for the task: (returnval){ [ 922.990900] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b67791-ff49-6624-f819-150fcd9510d0" [ 922.990900] env[61978]: _type = "Task" [ 922.990900] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.994931] env[61978]: DEBUG nova.compute.manager [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 923.010645] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b67791-ff49-6624-f819-150fcd9510d0, 'name': SearchDatastore_Task, 'duration_secs': 0.009903} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.010967] env[61978]: DEBUG oslo_concurrency.lockutils [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.011261] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 32bcb974-8db9-43e2-b397-b497f3a4f30c/32bcb974-8db9-43e2-b397-b497f3a4f30c.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 923.011535] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7167a072-f2b0-4ee5-82a6-faece81b7df5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.019586] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Waiting for the task: (returnval){ [ 923.019586] env[61978]: value = "task-1394816" [ 923.019586] env[61978]: _type = "Task" [ 923.019586] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.031181] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Task: {'id': task-1394816, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.041636] env[61978]: DEBUG nova.virt.hardware [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 923.041883] env[61978]: DEBUG nova.virt.hardware [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 923.042045] env[61978]: DEBUG nova.virt.hardware [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 923.042261] env[61978]: DEBUG nova.virt.hardware [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 923.042418] env[61978]: DEBUG nova.virt.hardware [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 923.042565] env[61978]: DEBUG nova.virt.hardware [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 923.042884] env[61978]: DEBUG nova.virt.hardware [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 923.043345] env[61978]: DEBUG nova.virt.hardware [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 
tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 923.043551] env[61978]: DEBUG nova.virt.hardware [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 923.043724] env[61978]: DEBUG nova.virt.hardware [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 923.043898] env[61978]: DEBUG nova.virt.hardware [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 923.044985] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a5e0e5-703f-426e-ba70-6f42fcb89220 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.053875] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd259f40-0402-475b-9c73-191dc3a063d4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.123018] env[61978]: DEBUG oslo_concurrency.lockutils [req-35ceae9e-b5c7-46c8-bf4c-cb250665d4d3 req-662d0ae0-2554-410b-8ea3-f824e5a75357 service nova] Releasing lock "refresh_cache-32bcb974-8db9-43e2-b397-b497f3a4f30c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.123495] env[61978]: DEBUG nova.compute.manager [req-35ceae9e-b5c7-46c8-bf4c-cb250665d4d3 req-662d0ae0-2554-410b-8ea3-f824e5a75357 service nova] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Received event network-vif-deleted-78b26e7f-9b45-42b2-8950-c83ae8b8b32f {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 923.131320] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394804, 'name': RelocateVM_Task} progress is 98%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.202029] env[61978]: DEBUG oslo_concurrency.lockutils [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Acquiring lock "f1001633-e4e5-4de1-8a6b-cf653e43d821" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.202809] env[61978]: DEBUG oslo_concurrency.lockutils [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Lock "f1001633-e4e5-4de1-8a6b-cf653e43d821" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.245390] env[61978]: DEBUG oslo_concurrency.lockutils [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.275s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.249032] env[61978]: DEBUG oslo_concurrency.lockutils [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.291s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.250084] env[61978]: INFO nova.compute.claims [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 923.291863] env[61978]: INFO nova.scheduler.client.report [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Deleted allocations for instance a0762952-2afd-448a-8e46-ba788a4ca131 [ 923.417197] env[61978]: DEBUG nova.compute.manager [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 923.553025] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Task: {'id': task-1394816, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505458} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.553025] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 32bcb974-8db9-43e2-b397-b497f3a4f30c/32bcb974-8db9-43e2-b397-b497f3a4f30c.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 923.553025] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 923.553025] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-822407c7-d7d6-474f-994a-fee677b7ceed {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.562019] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Waiting for the task: (returnval){ [ 923.562019] env[61978]: value = "task-1394817" [ 923.562019] env[61978]: _type = "Task" [ 923.562019] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.571644] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Task: {'id': task-1394817, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.629997] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394804, 'name': RelocateVM_Task, 'duration_secs': 3.723083} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.630748] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Volume attach. 
Driver type: vmdk {{(pid=61978) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 923.631143] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295814', 'volume_id': '8895fd52-1e47-4be8-bc86-5cb974e51fe1', 'name': 'volume-8895fd52-1e47-4be8-bc86-5cb974e51fe1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e30d4a9f-1d75-453c-9552-2a0fbd4aa87d', 'attached_at': '', 'detached_at': '', 'volume_id': '8895fd52-1e47-4be8-bc86-5cb974e51fe1', 'serial': '8895fd52-1e47-4be8-bc86-5cb974e51fe1'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 923.632264] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff27aa21-2801-4fc6-89ab-e80814e87968 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.652938] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb3e137-f62f-419b-9057-1cb01ea53055 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.682220] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Reconfiguring VM instance instance-0000001b to attach disk [datastore2] volume-8895fd52-1e47-4be8-bc86-5cb974e51fe1/volume-8895fd52-1e47-4be8-bc86-5cb974e51fe1.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 923.682554] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70a486c8-f71b-49da-8385-ddc9eee5d982 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.704150] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Waiting for the task: (returnval){ [ 923.704150] env[61978]: value = "task-1394818" [ 923.704150] env[61978]: _type = "Task" [ 923.704150] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.716588] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394818, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.804674] env[61978]: DEBUG oslo_concurrency.lockutils [None req-85a754c7-d5c0-48ce-b061-7428eb21e14f tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "a0762952-2afd-448a-8e46-ba788a4ca131" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.142s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.805780] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5043bb7e-54e2-4fba-845f-8bb6600f20d6 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "a0762952-2afd-448a-8e46-ba788a4ca131" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 20.844s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.805928] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5043bb7e-54e2-4fba-845f-8bb6600f20d6 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquiring lock "a0762952-2afd-448a-8e46-ba788a4ca131-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.806150] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5043bb7e-54e2-4fba-845f-8bb6600f20d6 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "a0762952-2afd-448a-8e46-ba788a4ca131-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.806310] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5043bb7e-54e2-4fba-845f-8bb6600f20d6 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "a0762952-2afd-448a-8e46-ba788a4ca131-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.823764] env[61978]: INFO nova.compute.manager [None req-5043bb7e-54e2-4fba-845f-8bb6600f20d6 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Terminating instance [ 923.823764] env[61978]: DEBUG nova.network.neutron [-] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.823764] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5043bb7e-54e2-4fba-845f-8bb6600f20d6 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquiring lock "refresh_cache-a0762952-2afd-448a-8e46-ba788a4ca131" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 923.823764] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5043bb7e-54e2-4fba-845f-8bb6600f20d6 
tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquired lock "refresh_cache-a0762952-2afd-448a-8e46-ba788a4ca131" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.823764] env[61978]: DEBUG nova.network.neutron [None req-5043bb7e-54e2-4fba-845f-8bb6600f20d6 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 923.945839] env[61978]: DEBUG oslo_concurrency.lockutils [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.047281] env[61978]: DEBUG nova.compute.manager [req-afdbe3a2-b127-469a-8845-96bc0b1f0c88 req-e8299fdc-f67d-410a-9770-be76dcdc8412 service nova] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Received event network-vif-deleted-f28bab30-b505-494a-97cf-e0d85ff19cda {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 924.071548] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Task: {'id': task-1394817, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.248988} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.072425] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 924.072549] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c77906-961d-4108-bde8-57403aa87531 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.096683] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Reconfiguring VM instance instance-0000001c to attach disk [datastore2] 32bcb974-8db9-43e2-b397-b497f3a4f30c/32bcb974-8db9-43e2-b397-b497f3a4f30c.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 924.096905] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a7c28f0-3fd7-4974-907c-6e69f48282bf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.118934] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Waiting for the task: (returnval){ [ 924.118934] env[61978]: value = "task-1394819" [ 
924.118934] env[61978]: _type = "Task" [ 924.118934] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.132536] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Task: {'id': task-1394819, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.216210] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394818, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.316570] env[61978]: INFO nova.compute.manager [-] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Took 1.37 seconds to deallocate network for instance. [ 924.316570] env[61978]: DEBUG nova.compute.utils [None req-5043bb7e-54e2-4fba-845f-8bb6600f20d6 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Can not refresh info_cache because instance was not found {{(pid=61978) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 924.357713] env[61978]: DEBUG nova.network.neutron [None req-5043bb7e-54e2-4fba-845f-8bb6600f20d6 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 924.420122] env[61978]: DEBUG nova.network.neutron [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Successfully updated port: cff12603-dd53-4dec-ad6d-70278b3ac575 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 924.529127] env[61978]: DEBUG nova.network.neutron [None req-5043bb7e-54e2-4fba-845f-8bb6600f20d6 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.635447] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Task: {'id': task-1394819, 'name': ReconfigVM_Task, 'duration_secs': 0.334482} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.635763] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Reconfigured VM instance instance-0000001c to attach disk [datastore2] 32bcb974-8db9-43e2-b397-b497f3a4f30c/32bcb974-8db9-43e2-b397-b497f3a4f30c.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 924.636557] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-52ca143b-5e83-42e5-a28e-55ea58344a66 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.645256] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Waiting for the task: (returnval){ [ 924.645256] env[61978]: value = "task-1394820" [ 924.645256] env[61978]: _type = "Task" [ 924.645256] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.658312] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Task: {'id': task-1394820, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.719117] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394818, 'name': ReconfigVM_Task, 'duration_secs': 0.672116} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.719472] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Reconfigured VM instance instance-0000001b to attach disk [datastore2] volume-8895fd52-1e47-4be8-bc86-5cb974e51fe1/volume-8895fd52-1e47-4be8-bc86-5cb974e51fe1.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 924.724449] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0991244-0c35-44a9-8ba6-1c498b0c39c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.748037] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Waiting for the task: (returnval){ [ 924.748037] env[61978]: value = "task-1394821" [ 924.748037] env[61978]: _type = "Task" [ 924.748037] env[61978]: } to complete. 
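
Every "Waiting for the task ... to complete", "progress is N%" and "completed successfully" triplet in this section comes from the same poll loop in oslo.vmware. The sketch below re-implements that loop in isolation so the pattern is visible; fetch_task_info is a hypothetical callable standing in for the PropertyCollector read the real poller performs, and none of this is oslo.vmware's own code.

    import time

    def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300):
        # fetch_task_info() -> (state, progress, error), e.g. ('running', 14, None)
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress, error = fetch_task_info()
            if state == 'success':
                return True                        # logged as "completed successfully"
            if state == 'error':
                raise RuntimeError('task failed: %s' % error)
            print('progress is %d%%' % progress)   # the recurring _poll_task lines
            time.sleep(poll_interval)
        raise TimeoutError('task did not complete within %ss' % timeout)
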
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.765541] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394821, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.848463] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf90385-eee9-47aa-ae7f-03c22858c1d8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.861886] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1971dd56-d59e-4c7b-937d-17e8c9a8190d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.894220] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495b4ddc-93ee-45dd-84b5-0d4e2fcb35e6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.904830] env[61978]: INFO nova.compute.manager [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Took 0.59 seconds to detach 1 volumes for instance. [ 924.907649] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6659c1ab-1f5a-4809-a0d5-32a2c59d304d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.928740] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Acquiring lock "refresh_cache-b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 924.928824] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Acquired lock "refresh_cache-b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.928996] env[61978]: DEBUG nova.network.neutron [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 924.930461] env[61978]: DEBUG nova.compute.provider_tree [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 925.036037] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5043bb7e-54e2-4fba-845f-8bb6600f20d6 
tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Releasing lock "refresh_cache-a0762952-2afd-448a-8e46-ba788a4ca131" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.036312] env[61978]: DEBUG nova.compute.manager [None req-5043bb7e-54e2-4fba-845f-8bb6600f20d6 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 925.036312] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5043bb7e-54e2-4fba-845f-8bb6600f20d6 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 925.036929] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d010e6e0-17d0-463f-bcbf-b30de87f0c0f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.047824] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dbc6cf1-01f5-4933-b06d-3f25529c827c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.085532] env[61978]: WARNING nova.virt.vmwareapi.vmops [None req-5043bb7e-54e2-4fba-845f-8bb6600f20d6 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a0762952-2afd-448a-8e46-ba788a4ca131 could not be found. [ 925.085711] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5043bb7e-54e2-4fba-845f-8bb6600f20d6 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 925.086016] env[61978]: INFO nova.compute.manager [None req-5043bb7e-54e2-4fba-845f-8bb6600f20d6 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Took 0.05 seconds to destroy the instance on the hypervisor. [ 925.087153] env[61978]: DEBUG oslo.service.loopingcall [None req-5043bb7e-54e2-4fba-845f-8bb6600f20d6 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 925.087153] env[61978]: DEBUG nova.compute.manager [-] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 925.087153] env[61978]: DEBUG nova.network.neutron [-] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 925.110822] env[61978]: DEBUG nova.network.neutron [-] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 925.158306] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Task: {'id': task-1394820, 'name': Rename_Task, 'duration_secs': 0.164286} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.158306] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 925.158306] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ab942f3-9185-404f-8fee-aef6926e009d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.166304] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Waiting for the task: (returnval){ [ 925.166304] env[61978]: value = "task-1394822" [ 925.166304] env[61978]: _type = "Task" [ 925.166304] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.175992] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Task: {'id': task-1394822, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.243974] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquiring lock "9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.244380] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.244578] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquiring lock "9b6b4da7-4f86-46bc-a75f-fc5e1126c53b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.244768] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "9b6b4da7-4f86-46bc-a75f-fc5e1126c53b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.244939] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "9b6b4da7-4f86-46bc-a75f-fc5e1126c53b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.248723] env[61978]: INFO nova.compute.manager [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Terminating instance [ 925.257414] env[61978]: DEBUG nova.compute.manager [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Start destroying the instance on the hypervisor. 
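
The acquire/release bookkeeping around the terminate path above ("acquired ... waited 0.001s", "released ... held 0.000s") is oslo.concurrency's lock wrapper, which logs how long each caller waited for and held a named lock. A small example of the same primitives; the lock names and the empty function bodies are illustrative only.

    from oslo_concurrency import lockutils

    # Per-instance serialization in the style of the compute-manager entries above.
    @lockutils.synchronized('9b6b4da7-4f86-46bc-a75f-fc5e1126c53b')
    def do_terminate_instance():
        pass  # work done while the per-instance lock is held

    # The same primitive as a context manager, e.g. for the "-events" lock:
    with lockutils.lock('9b6b4da7-4f86-46bc-a75f-fc5e1126c53b-events'):
        pass
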
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 925.257579] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 925.258697] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b2cd02-d9c1-4134-92c2-6cafa922305f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.267864] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394821, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.270144] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 925.271028] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bafc7f1e-f0a4-4c48-ae5b-84ae53ccefc3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.278563] env[61978]: DEBUG oslo_vmware.api [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for the task: (returnval){ [ 925.278563] env[61978]: value = "task-1394823" [ 925.278563] env[61978]: _type = "Task" [ 925.278563] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.288773] env[61978]: DEBUG oslo_vmware.api [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394823, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.416610] env[61978]: DEBUG oslo_concurrency.lockutils [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.435011] env[61978]: DEBUG nova.scheduler.client.report [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 925.474188] env[61978]: DEBUG nova.network.neutron [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 925.613813] env[61978]: DEBUG nova.network.neutron [-] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.632253] env[61978]: DEBUG nova.network.neutron [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Updating instance_info_cache with network_info: [{"id": "cff12603-dd53-4dec-ad6d-70278b3ac575", "address": "fa:16:3e:7d:3d:fd", "network": {"id": "6d985334-1a92-44ff-8c2d-568c7ec11134", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-115854966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff197aac855441de95476c15480603cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "939c05b6-8f31-4f3a-95ac-6297e0bd243e", "external-id": "nsx-vlan-transportzone-825", "segmentation_id": 825, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcff12603-dd", "ovs_interfaceid": "cff12603-dd53-4dec-ad6d-70278b3ac575", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.678126] env[61978]: DEBUG 
oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Task: {'id': task-1394822, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.760314] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394821, 'name': ReconfigVM_Task, 'duration_secs': 0.68986} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.760664] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295814', 'volume_id': '8895fd52-1e47-4be8-bc86-5cb974e51fe1', 'name': 'volume-8895fd52-1e47-4be8-bc86-5cb974e51fe1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e30d4a9f-1d75-453c-9552-2a0fbd4aa87d', 'attached_at': '', 'detached_at': '', 'volume_id': '8895fd52-1e47-4be8-bc86-5cb974e51fe1', 'serial': '8895fd52-1e47-4be8-bc86-5cb974e51fe1'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 925.761193] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-501ad4db-4098-4076-b854-50e488c2c149 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.768144] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Waiting for the task: (returnval){ [ 925.768144] env[61978]: value = "task-1394824" [ 925.768144] env[61978]: _type = "Task" [ 925.768144] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.776743] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394824, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.788088] env[61978]: DEBUG oslo_vmware.api [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394823, 'name': PowerOffVM_Task, 'duration_secs': 0.190718} completed successfully. 
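
The inventory snapshot logged for provider 44209228-3464-48ae-bc40-83eccd44b0cf a few entries back carries totals, reserved amounts and allocation ratios; the capacity the scheduler can place against is (total - reserved) * allocation_ratio per resource class. A quick check of the numbers in that entry:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        print(rc, (inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
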
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.788362] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 925.788537] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 925.788792] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc0b5169-2170-4705-b24f-ee54ab38e36e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.860395] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 925.861025] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 925.861025] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Deleting the datastore file [datastore2] 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 925.861234] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d13fa0dd-1811-459f-9197-34e6df44184f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.869686] env[61978]: DEBUG oslo_vmware.api [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for the task: (returnval){ [ 925.869686] env[61978]: value = "task-1394826" [ 925.869686] env[61978]: _type = "Task" [ 925.869686] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.880718] env[61978]: DEBUG oslo_vmware.api [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394826, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.942400] env[61978]: DEBUG oslo_concurrency.lockutils [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.694s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.943130] env[61978]: DEBUG nova.compute.manager [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 925.946521] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 23.190s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.089192] env[61978]: DEBUG nova.compute.manager [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Received event network-vif-plugged-cff12603-dd53-4dec-ad6d-70278b3ac575 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 926.089192] env[61978]: DEBUG oslo_concurrency.lockutils [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] Acquiring lock "b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.089192] env[61978]: DEBUG oslo_concurrency.lockutils [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] Lock "b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.089192] env[61978]: DEBUG oslo_concurrency.lockutils [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] Lock "b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.089509] env[61978]: DEBUG nova.compute.manager [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] No waiting events found dispatching network-vif-plugged-cff12603-dd53-4dec-ad6d-70278b3ac575 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 926.089509] env[61978]: WARNING nova.compute.manager [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Received unexpected event network-vif-plugged-cff12603-dd53-4dec-ad6d-70278b3ac575 for instance with vm_state building and task_state 
spawning. [ 926.089662] env[61978]: DEBUG nova.compute.manager [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Received event network-changed-cff12603-dd53-4dec-ad6d-70278b3ac575 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 926.089815] env[61978]: DEBUG nova.compute.manager [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Refreshing instance network info cache due to event network-changed-cff12603-dd53-4dec-ad6d-70278b3ac575. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 926.089983] env[61978]: DEBUG oslo_concurrency.lockutils [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] Acquiring lock "refresh_cache-b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.118672] env[61978]: INFO nova.compute.manager [-] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Took 1.03 seconds to deallocate network for instance. [ 926.134534] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Releasing lock "refresh_cache-b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.134979] env[61978]: DEBUG nova.compute.manager [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Instance network_info: |[{"id": "cff12603-dd53-4dec-ad6d-70278b3ac575", "address": "fa:16:3e:7d:3d:fd", "network": {"id": "6d985334-1a92-44ff-8c2d-568c7ec11134", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-115854966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff197aac855441de95476c15480603cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "939c05b6-8f31-4f3a-95ac-6297e0bd243e", "external-id": "nsx-vlan-transportzone-825", "segmentation_id": 825, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcff12603-dd", "ovs_interfaceid": "cff12603-dd53-4dec-ad6d-70278b3ac575", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 926.135429] env[61978]: DEBUG oslo_concurrency.lockutils [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] Acquired lock "refresh_cache-b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
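
The network_info blob logged for instance b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14 (in the cache update above and again in the "Instance network_info:" entry) is a list of VIF dicts. A small sketch that pulls out the fields the spawn path goes on to use, fed with a trimmed copy of the logged data; summarize_vif is an illustrative helper, not Nova's.

    def summarize_vif(vif):
        ips = [ip['address']
               for subnet in vif['network']['subnets']
               for ip in subnet['ips']]
        return {'port_id': vif['id'],
                'mac': vif['address'],
                'fixed_ips': ips,
                'ovs_interfaceid': vif.get('ovs_interfaceid'),
                'logical_switch': vif['details'].get('nsx-logical-switch-id')}

    vif = {'id': 'cff12603-dd53-4dec-ad6d-70278b3ac575',
           'address': 'fa:16:3e:7d:3d:fd',
           'network': {'subnets': [{'ips': [{'address': '192.168.128.11'}]}]},
           'ovs_interfaceid': 'cff12603-dd53-4dec-ad6d-70278b3ac575',
           'details': {'nsx-logical-switch-id': '939c05b6-8f31-4f3a-95ac-6297e0bd243e'}}
    print(summarize_vif(vif))
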
926.135702] env[61978]: DEBUG nova.network.neutron [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Refreshing network info cache for port cff12603-dd53-4dec-ad6d-70278b3ac575 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 926.138703] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:3d:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '939c05b6-8f31-4f3a-95ac-6297e0bd243e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cff12603-dd53-4dec-ad6d-70278b3ac575', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 926.149766] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Creating folder: Project (ff197aac855441de95476c15480603cf). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 926.151701] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a21c521e-5fab-41bf-9e4b-c4c90a1808d5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.168043] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Created folder: Project (ff197aac855441de95476c15480603cf) in parent group-v295764. [ 926.168350] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Creating folder: Instances. Parent ref: group-v295844. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 926.173669] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-774cc50e-a01c-4a76-b8d2-62dddbfe7f40 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.184014] env[61978]: DEBUG oslo_vmware.api [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Task: {'id': task-1394822, 'name': PowerOnVM_Task, 'duration_secs': 0.530609} completed successfully. 
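
Between the network_info above and the "Instance VIF info" entry here, the neutron port data is reshaped into the dict the VMware driver builds the virtual NIC from (an OpaqueNetwork reference plus the vmxnet3 model). A hedged sketch of that translation using the values visible in both entries; to_vmware_vif_info is illustrative and not the driver's build_virtual_machine code.

    def to_vmware_vif_info(vif):
        return {'network_name': vif['network']['bridge'],            # 'br-int'
                'mac_address': vif['address'],
                'network_ref': {'type': 'OpaqueNetwork',
                                'network-id': vif['details']['nsx-logical-switch-id'],
                                'network-type': 'nsx.LogicalSwitch',
                                'use-external-id': True},
                'iface_id': vif['id'],
                'vif_model': 'vmxnet3'}

    vif = {'id': 'cff12603-dd53-4dec-ad6d-70278b3ac575',
           'address': 'fa:16:3e:7d:3d:fd',
           'network': {'bridge': 'br-int'},
           'details': {'nsx-logical-switch-id': '939c05b6-8f31-4f3a-95ac-6297e0bd243e'}}
    print(to_vmware_vif_info(vif))
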
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.184362] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 926.184688] env[61978]: INFO nova.compute.manager [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Took 8.32 seconds to spawn the instance on the hypervisor. [ 926.184958] env[61978]: DEBUG nova.compute.manager [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 926.186287] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcbcb2e0-421f-4f30-ba3b-6e0a3b17c4bd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.191770] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Created folder: Instances in parent group-v295844. [ 926.192106] env[61978]: DEBUG oslo.service.loopingcall [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 926.192889] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 926.193636] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-76c8354f-097b-41bd-8d0e-20c65d9020e5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.231042] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 926.231042] env[61978]: value = "task-1394829" [ 926.231042] env[61978]: _type = "Task" [ 926.231042] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.237752] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394829, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.281439] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394824, 'name': Rename_Task, 'duration_secs': 0.189305} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.281751] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 926.282285] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-427029fc-1eb6-4534-bfc2-4e0d2000ffb3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.290280] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Waiting for the task: (returnval){ [ 926.290280] env[61978]: value = "task-1394830" [ 926.290280] env[61978]: _type = "Task" [ 926.290280] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.302611] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394830, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.381677] env[61978]: DEBUG oslo_vmware.api [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394826, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.450992] env[61978]: DEBUG nova.compute.utils [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 926.465254] env[61978]: DEBUG nova.compute.manager [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 926.465481] env[61978]: DEBUG nova.network.neutron [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 926.512393] env[61978]: DEBUG nova.policy [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2214c53c241b4aa496361e8fd7403aa4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eef91afc00664fdb9cb1ba727a29de2c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 926.627239] env[61978]: INFO nova.compute.manager [None req-5043bb7e-54e2-4fba-845f-8bb6600f20d6 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Instance disappeared during terminate [ 926.627359] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5043bb7e-54e2-4fba-845f-8bb6600f20d6 tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "a0762952-2afd-448a-8e46-ba788a4ca131" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.822s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.735474] env[61978]: INFO nova.compute.manager [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Took 37.07 seconds to build instance. [ 926.742734] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394829, 'name': CreateVM_Task, 'duration_secs': 0.510893} completed successfully. 
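
The "Policy check for network:attach_external_network failed" entry above includes the credential dict the decision was made against: a member/reader token, not an admin one. The toy check below shows why an admin-only rule fails for those credentials; it is a plain role test for illustration, not oslo.policy, and the assumption that the rule is admin-only is inferred from the failure rather than stated in the log.

    creds = {'roles': ['reader', 'member'], 'is_admin': False,
             'project_id': 'eef91afc00664fdb9cb1ba727a29de2c'}

    def attach_external_network_allowed(creds):
        # admin-only rule, as assumed above
        return creds.get('is_admin', False) or 'admin' in creds.get('roles', [])

    print(attach_external_network_allowed(creds))   # False -> the check fails, as logged
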
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.742904] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 926.743713] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.743876] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.744203] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 926.744481] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a4e3c2e-e113-4956-af16-07e7f784f709 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.751695] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Waiting for the task: (returnval){ [ 926.751695] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ed3f7b-ad37-b5db-7631-b90df9d68f8b" [ 926.751695] env[61978]: _type = "Task" [ 926.751695] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.768568] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ed3f7b-ad37-b5db-7631-b90df9d68f8b, 'name': SearchDatastore_Task, 'duration_secs': 0.011956} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.774577] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.774799] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 926.775061] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.775216] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.775398] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 926.776033] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1f2b91c-03d2-4cd6-996f-11679329438c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.786493] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 926.786730] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 926.787469] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cedb8513-5aea-4676-9b24-013508e8df5f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.798209] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Waiting for the task: (returnval){ [ 926.798209] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5250a9ac-5d51-8043-2ed1-b8c6479f450d" [ 926.798209] env[61978]: _type = "Task" [ 926.798209] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.805607] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394830, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.811693] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5250a9ac-5d51-8043-2ed1-b8c6479f450d, 'name': SearchDatastore_Task, 'duration_secs': 0.01019} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.812584] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5a9b34a-ca4a-4842-b3d4-c01849c23a90 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.818595] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Waiting for the task: (returnval){ [ 926.818595] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]522a439e-a316-8142-1d7b-8130ab1fa910" [ 926.818595] env[61978]: _type = "Task" [ 926.818595] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.828365] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522a439e-a316-8142-1d7b-8130ab1fa910, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.897599] env[61978]: DEBUG oslo_vmware.api [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Task: {'id': task-1394826, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.598138} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.897956] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 926.898383] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 926.898493] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 926.898732] env[61978]: INFO nova.compute.manager [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Took 1.64 seconds to destroy the instance on the hypervisor. [ 926.899115] env[61978]: DEBUG oslo.service.loopingcall [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 926.899301] env[61978]: DEBUG nova.compute.manager [-] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 926.899426] env[61978]: DEBUG nova.network.neutron [-] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 926.946839] env[61978]: DEBUG nova.network.neutron [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Successfully created port: 699028fe-9214-4b16-9c8f-468ae56d50b0 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 926.970950] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Applying migration context for instance b26a4784-698d-477a-8db7-58156899d231 as it has an incoming, in-progress migration a5c81d5c-f908-418a-92b4-c9cc3e2002af. 
Migration status is confirming {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 926.974324] env[61978]: INFO nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: b26a4784-698d-477a-8db7-58156899d231] Updating resource usage from migration a5c81d5c-f908-418a-92b4-c9cc3e2002af [ 926.977996] env[61978]: DEBUG nova.compute.manager [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 927.024352] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 927.024550] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance e9e2deb5-5bf9-4b57-832f-9928d3cda162 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 927.024673] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 92eb5edb-803b-48d4-8c4f-338d7c3b3d13 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 927.024789] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 243e7146-46fc-43f4-a83b-cdc58f397f9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 927.024904] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 85fc5af8-454d-4042-841a-945b7e84eb6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 927.025046] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance eb7cb200-c162-4e92-8916-6d9abd5cf34d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 927.025162] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 371ddf66-a39b-41c4-bbd1-2a1c1b99834e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 927.025526] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance d2614f71-3026-41d4-ae04-eaede9b5ead5 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 927.025526] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 96bef3f3-a45c-43ba-a86a-66c1d5686ea6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 927.025526] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 2f5b06f6-7178-4fdf-93b6-65477f020898 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 927.025694] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance a4d45835-f065-445f-bcb6-d1b01d545cb0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 927.025770] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 3ddf7322-5504-408f-af6c-af73fb1c4286 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 927.026270] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 76dff032-a806-4910-a48b-8850b05131c1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 927.026270] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Migration a5c81d5c-f908-418a-92b4-c9cc3e2002af is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 927.026270] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance b26a4784-698d-477a-8db7-58156899d231 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 927.026270] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance bb0c149c-920e-47c4-a960-47b2fb443431 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 927.026428] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 50788030-4dc2-4215-bf2c-acba5dd33ce4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 927.026428] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance c17c986e-c008-4414-8dd1-4ea836458048 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 927.026656] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance ff793464-9bef-449f-8485-36d3b8fb1d69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 927.026656] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance e30d4a9f-1d75-453c-9552-2a0fbd4aa87d actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 927.026772] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 32bcb974-8db9-43e2-b397-b497f3a4f30c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 927.026868] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 927.027057] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 78b78ae7-74fe-4403-be9b-229abe6a7353 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 927.049614] env[61978]: DEBUG nova.network.neutron [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Updated VIF entry in instance network info cache for port cff12603-dd53-4dec-ad6d-70278b3ac575. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 927.051142] env[61978]: DEBUG nova.network.neutron [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Updating instance_info_cache with network_info: [{"id": "cff12603-dd53-4dec-ad6d-70278b3ac575", "address": "fa:16:3e:7d:3d:fd", "network": {"id": "6d985334-1a92-44ff-8c2d-568c7ec11134", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-115854966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff197aac855441de95476c15480603cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "939c05b6-8f31-4f3a-95ac-6297e0bd243e", "external-id": "nsx-vlan-transportzone-825", "segmentation_id": 825, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcff12603-dd", "ovs_interfaceid": "cff12603-dd53-4dec-ad6d-70278b3ac575", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.238168] env[61978]: DEBUG oslo_concurrency.lockutils [None req-50daa253-bf10-41c4-bf63-ebc8ad1ec03a tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Lock "32bcb974-8db9-43e2-b397-b497f3a4f30c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.582s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.301494] env[61978]: DEBUG oslo_vmware.api [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394830, 'name': PowerOnVM_Task, 'duration_secs': 0.714108} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.301752] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 927.301948] env[61978]: INFO nova.compute.manager [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Took 10.32 seconds to spawn the instance on the hypervisor. [ 927.302701] env[61978]: DEBUG nova.compute.manager [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 927.303510] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d94347-e30a-4ca9-867e-d0829ac1fa74 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.330682] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522a439e-a316-8142-1d7b-8130ab1fa910, 'name': SearchDatastore_Task, 'duration_secs': 0.010985} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.330682] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.330810] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14/b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 927.334913] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e0273187-f3b3-4e9b-bc45-b70bfe39365f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.340843] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Waiting for the task: (returnval){ [ 927.340843] env[61978]: value = "task-1394831" [ 927.340843] env[61978]: _type = "Task" [ 927.340843] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.359316] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': task-1394831, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.482356] env[61978]: DEBUG nova.compute.manager [req-d50546ce-975c-4f86-9c1f-0bf4e3fcb548 req-558db1f0-4e4d-4d5c-895d-a80f59f0dcba service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Received event network-vif-deleted-a5290cfd-6d88-4c49-a54c-626d4c4843bd {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 927.482606] env[61978]: INFO nova.compute.manager [req-d50546ce-975c-4f86-9c1f-0bf4e3fcb548 req-558db1f0-4e4d-4d5c-895d-a80f59f0dcba service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Neutron deleted interface a5290cfd-6d88-4c49-a54c-626d4c4843bd; detaching it from the instance and deleting it from the info cache [ 927.482825] env[61978]: DEBUG nova.network.neutron [req-d50546ce-975c-4f86-9c1f-0bf4e3fcb548 req-558db1f0-4e4d-4d5c-895d-a80f59f0dcba service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.531207] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance f3c837fb-be7e-40a6-aae4-7f213c62ab2c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 927.554226] env[61978]: DEBUG oslo_concurrency.lockutils [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] Releasing lock "refresh_cache-b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.554544] env[61978]: DEBUG nova.compute.manager [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Received event network-changed-6248c9c9-4f43-44c4-a25a-63b0c9920e89 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 927.554723] env[61978]: DEBUG nova.compute.manager [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Refreshing instance network info cache due to event network-changed-6248c9c9-4f43-44c4-a25a-63b0c9920e89. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 927.555050] env[61978]: DEBUG oslo_concurrency.lockutils [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] Acquiring lock "refresh_cache-ff793464-9bef-449f-8485-36d3b8fb1d69" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.555116] env[61978]: DEBUG oslo_concurrency.lockutils [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] Acquired lock "refresh_cache-ff793464-9bef-449f-8485-36d3b8fb1d69" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.555248] env[61978]: DEBUG nova.network.neutron [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Refreshing network info cache for port 6248c9c9-4f43-44c4-a25a-63b0c9920e89 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 927.654995] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.655286] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.741676] env[61978]: DEBUG nova.compute.manager [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 927.824140] env[61978]: DEBUG nova.network.neutron [-] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.830161] env[61978]: INFO nova.compute.manager [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Took 38.60 seconds to build instance. [ 927.856198] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': task-1394831, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.987237] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-adf7755b-3250-4479-93b3-4310bf3f32f1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.991991] env[61978]: DEBUG nova.compute.manager [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 927.998416] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e2dd571-4abe-4ed1-8cc9-b0234b57f438 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.021586] env[61978]: DEBUG nova.virt.hardware [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 928.021586] env[61978]: DEBUG nova.virt.hardware [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 928.021586] env[61978]: DEBUG nova.virt.hardware [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 928.021757] env[61978]: DEBUG nova.virt.hardware [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 928.021757] env[61978]: DEBUG nova.virt.hardware [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 928.022485] env[61978]: DEBUG nova.virt.hardware [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 928.022485] env[61978]: DEBUG nova.virt.hardware [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 928.022485] env[61978]: DEBUG nova.virt.hardware [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 928.022485] env[61978]: DEBUG nova.virt.hardware [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 928.022715] env[61978]: DEBUG nova.virt.hardware [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 928.022715] env[61978]: DEBUG nova.virt.hardware [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 928.023660] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070d524e-ed49-49f8-81e9-e9d29cb6bcf9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.037765] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance f22e097d-f1a5-414a-82cc-ab455db876c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 928.039297] env[61978]: DEBUG nova.compute.manager [req-d50546ce-975c-4f86-9c1f-0bf4e3fcb548 req-558db1f0-4e4d-4d5c-895d-a80f59f0dcba service nova] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Detach interface failed, port_id=a5290cfd-6d88-4c49-a54c-626d4c4843bd, reason: Instance 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b could not be found. 
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 928.046260] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5ca9b7-46e9-4555-af53-f3ec245cae01 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.267801] env[61978]: DEBUG oslo_concurrency.lockutils [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.331512] env[61978]: INFO nova.compute.manager [-] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Took 1.43 seconds to deallocate network for instance. [ 928.332057] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0413bd97-bcb4-450c-ae51-717c05e90ced tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Lock "e30d4a9f-1d75-453c-9552-2a0fbd4aa87d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.388s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.354393] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': task-1394831, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.951625} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.354680] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14/b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 928.354956] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 928.355282] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d837e6f7-ec03-461b-ac31-767443db5f4f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.364067] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Waiting for the task: (returnval){ [ 928.364067] env[61978]: value = "task-1394832" [ 928.364067] env[61978]: _type = "Task" [ 928.364067] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.374893] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': task-1394832, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.522465] env[61978]: DEBUG nova.compute.manager [req-5afe55b1-49e9-4b6b-8ebe-11bdb18d867f req-7688a6d5-af24-4fa8-ad4c-f57bdbd87a06 service nova] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Received event network-vif-plugged-699028fe-9214-4b16-9c8f-468ae56d50b0 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 928.523551] env[61978]: DEBUG oslo_concurrency.lockutils [req-5afe55b1-49e9-4b6b-8ebe-11bdb18d867f req-7688a6d5-af24-4fa8-ad4c-f57bdbd87a06 service nova] Acquiring lock "78b78ae7-74fe-4403-be9b-229abe6a7353-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.524179] env[61978]: DEBUG oslo_concurrency.lockutils [req-5afe55b1-49e9-4b6b-8ebe-11bdb18d867f req-7688a6d5-af24-4fa8-ad4c-f57bdbd87a06 service nova] Lock "78b78ae7-74fe-4403-be9b-229abe6a7353-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.524903] env[61978]: DEBUG oslo_concurrency.lockutils [req-5afe55b1-49e9-4b6b-8ebe-11bdb18d867f req-7688a6d5-af24-4fa8-ad4c-f57bdbd87a06 service nova] Lock "78b78ae7-74fe-4403-be9b-229abe6a7353-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.525235] env[61978]: DEBUG nova.compute.manager [req-5afe55b1-49e9-4b6b-8ebe-11bdb18d867f req-7688a6d5-af24-4fa8-ad4c-f57bdbd87a06 service nova] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] No waiting events found dispatching network-vif-plugged-699028fe-9214-4b16-9c8f-468ae56d50b0 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 928.525583] env[61978]: WARNING nova.compute.manager [req-5afe55b1-49e9-4b6b-8ebe-11bdb18d867f req-7688a6d5-af24-4fa8-ad4c-f57bdbd87a06 service nova] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Received unexpected event network-vif-plugged-699028fe-9214-4b16-9c8f-468ae56d50b0 for instance with vm_state building and task_state spawning. [ 928.541086] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 928.574834] env[61978]: DEBUG nova.network.neutron [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Successfully updated port: 699028fe-9214-4b16-9c8f-468ae56d50b0 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 928.842757] env[61978]: DEBUG nova.compute.manager [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 928.844696] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.857717] env[61978]: DEBUG nova.network.neutron [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Updated VIF entry in instance network info cache for port 6248c9c9-4f43-44c4-a25a-63b0c9920e89. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 928.858290] env[61978]: DEBUG nova.network.neutron [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Updating instance_info_cache with network_info: [{"id": "6248c9c9-4f43-44c4-a25a-63b0c9920e89", "address": "fa:16:3e:f1:b2:3a", "network": {"id": "8438071b-a3cd-4e2d-867f-1fa4a67bccca", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2102143534-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e0b4b9239e545cd8bd19bc98e5fc5b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6248c9c9-4f", "ovs_interfaceid": "6248c9c9-4f43-44c4-a25a-63b0c9920e89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.877242] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': task-1394832, 'name': ExtendVirtualDisk_Task, 
'duration_secs': 0.076375} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.877633] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 928.878590] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79bd44f-0346-4c3c-92bd-4b2b2bc77f21 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.905481] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Reconfiguring VM instance instance-0000001d to attach disk [datastore2] b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14/b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 928.906218] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3b2339f-e50d-4217-8862-8878110b4096 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.937631] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Waiting for the task: (returnval){ [ 928.937631] env[61978]: value = "task-1394833" [ 928.937631] env[61978]: _type = "Task" [ 928.937631] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.947942] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': task-1394833, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.045131] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance dd686727-fc33-4dc4-b386-aabec27cf215 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 929.076259] env[61978]: DEBUG oslo_concurrency.lockutils [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Acquiring lock "refresh_cache-78b78ae7-74fe-4403-be9b-229abe6a7353" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.076410] env[61978]: DEBUG oslo_concurrency.lockutils [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Acquired lock "refresh_cache-78b78ae7-74fe-4403-be9b-229abe6a7353" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.076560] env[61978]: DEBUG nova.network.neutron [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 929.362176] env[61978]: DEBUG oslo_concurrency.lockutils [req-f1316c45-c9f7-4c63-b046-18c936dd6116 req-dc461b53-f84b-4b6e-9e99-5398ccd8c6a0 service nova] Releasing lock "refresh_cache-ff793464-9bef-449f-8485-36d3b8fb1d69" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.376068] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.448891] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': task-1394833, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.514570] env[61978]: DEBUG nova.compute.manager [req-056652cd-a455-4147-8ceb-0ad4bb1bcd9f req-299900d3-6af9-4747-87c0-7cfb841635f7 service nova] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Received event network-changed-a33524bd-7627-49fa-ab70-55b0962b8ca3 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 929.514825] env[61978]: DEBUG nova.compute.manager [req-056652cd-a455-4147-8ceb-0ad4bb1bcd9f req-299900d3-6af9-4747-87c0-7cfb841635f7 service nova] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Refreshing instance network info cache due to event network-changed-a33524bd-7627-49fa-ab70-55b0962b8ca3. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 929.515058] env[61978]: DEBUG oslo_concurrency.lockutils [req-056652cd-a455-4147-8ceb-0ad4bb1bcd9f req-299900d3-6af9-4747-87c0-7cfb841635f7 service nova] Acquiring lock "refresh_cache-32bcb974-8db9-43e2-b397-b497f3a4f30c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.515203] env[61978]: DEBUG oslo_concurrency.lockutils [req-056652cd-a455-4147-8ceb-0ad4bb1bcd9f req-299900d3-6af9-4747-87c0-7cfb841635f7 service nova] Acquired lock "refresh_cache-32bcb974-8db9-43e2-b397-b497f3a4f30c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.515405] env[61978]: DEBUG nova.network.neutron [req-056652cd-a455-4147-8ceb-0ad4bb1bcd9f req-299900d3-6af9-4747-87c0-7cfb841635f7 service nova] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Refreshing network info cache for port a33524bd-7627-49fa-ab70-55b0962b8ca3 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 929.550205] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 929.612728] env[61978]: DEBUG nova.network.neutron [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 929.754485] env[61978]: DEBUG nova.network.neutron [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Updating instance_info_cache with network_info: [{"id": "699028fe-9214-4b16-9c8f-468ae56d50b0", "address": "fa:16:3e:ba:b4:fa", "network": {"id": "0106eca5-56c3-4566-b7fd-0fb456a9ffff", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-752118827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eef91afc00664fdb9cb1ba727a29de2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap699028fe-92", "ovs_interfaceid": "699028fe-9214-4b16-9c8f-468ae56d50b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.950873] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': task-1394833, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.054044] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance ea1c2d74-70b4-4547-a887-78e291c3082a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 930.257032] env[61978]: DEBUG oslo_concurrency.lockutils [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Releasing lock "refresh_cache-78b78ae7-74fe-4403-be9b-229abe6a7353" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.257378] env[61978]: DEBUG nova.compute.manager [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Instance network_info: |[{"id": "699028fe-9214-4b16-9c8f-468ae56d50b0", "address": "fa:16:3e:ba:b4:fa", "network": {"id": "0106eca5-56c3-4566-b7fd-0fb456a9ffff", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-752118827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eef91afc00664fdb9cb1ba727a29de2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap699028fe-92", "ovs_interfaceid": "699028fe-9214-4b16-9c8f-468ae56d50b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 930.257823] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ba:b4:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d054505-89d3-49c5-8b38-5da917a42c49', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '699028fe-9214-4b16-9c8f-468ae56d50b0', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 930.265355] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Creating folder: Project (eef91afc00664fdb9cb1ba727a29de2c). Parent ref: group-v295764. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 930.265660] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f35cfd1-3cc6-4b2f-822b-1f3abc43eb00 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.278543] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Created folder: Project (eef91afc00664fdb9cb1ba727a29de2c) in parent group-v295764. [ 930.278720] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Creating folder: Instances. Parent ref: group-v295847. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 930.278953] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-666c61d1-0500-465d-82ed-129045b4e710 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.289689] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Created folder: Instances in parent group-v295847. [ 930.289939] env[61978]: DEBUG oslo.service.loopingcall [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 930.290149] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 930.290358] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-773a8091-2fb1-442b-b761-a1f646a35ec0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.308858] env[61978]: DEBUG nova.network.neutron [req-056652cd-a455-4147-8ceb-0ad4bb1bcd9f req-299900d3-6af9-4747-87c0-7cfb841635f7 service nova] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Updated VIF entry in instance network info cache for port a33524bd-7627-49fa-ab70-55b0962b8ca3. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 930.309420] env[61978]: DEBUG nova.network.neutron [req-056652cd-a455-4147-8ceb-0ad4bb1bcd9f req-299900d3-6af9-4747-87c0-7cfb841635f7 service nova] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Updating instance_info_cache with network_info: [{"id": "a33524bd-7627-49fa-ab70-55b0962b8ca3", "address": "fa:16:3e:17:23:76", "network": {"id": "263e1420-6a47-473d-a527-fa8b27d6c538", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-912409184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d5745ee1ed64809a93b1e0b1dea11bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16e15a36-a55b-4c27-b864-f284339009d0", "external-id": "nsx-vlan-transportzone-616", "segmentation_id": 616, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa33524bd-76", "ovs_interfaceid": "a33524bd-7627-49fa-ab70-55b0962b8ca3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.313545] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 930.313545] env[61978]: value = "task-1394836" [ 930.313545] env[61978]: _type = "Task" [ 930.313545] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.321148] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394836, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.449996] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': task-1394833, 'name': ReconfigVM_Task, 'duration_secs': 1.032768} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.450310] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Reconfigured VM instance instance-0000001d to attach disk [datastore2] b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14/b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 930.450967] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75673ccf-b050-40f7-9a9f-cee6ea69ea6c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.458769] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Waiting for the task: (returnval){ [ 930.458769] env[61978]: value = "task-1394837" [ 930.458769] env[61978]: _type = "Task" [ 930.458769] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.467654] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': task-1394837, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.555409] env[61978]: DEBUG nova.compute.manager [req-2b2d06e1-18f0-49b6-bc82-d3c4d3cb15c2 req-12c54683-d185-4de6-9950-cd11c60c2082 service nova] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Received event network-changed-699028fe-9214-4b16-9c8f-468ae56d50b0 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 930.555582] env[61978]: DEBUG nova.compute.manager [req-2b2d06e1-18f0-49b6-bc82-d3c4d3cb15c2 req-12c54683-d185-4de6-9950-cd11c60c2082 service nova] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Refreshing instance network info cache due to event network-changed-699028fe-9214-4b16-9c8f-468ae56d50b0. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 930.555798] env[61978]: DEBUG oslo_concurrency.lockutils [req-2b2d06e1-18f0-49b6-bc82-d3c4d3cb15c2 req-12c54683-d185-4de6-9950-cd11c60c2082 service nova] Acquiring lock "refresh_cache-78b78ae7-74fe-4403-be9b-229abe6a7353" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.555941] env[61978]: DEBUG oslo_concurrency.lockutils [req-2b2d06e1-18f0-49b6-bc82-d3c4d3cb15c2 req-12c54683-d185-4de6-9950-cd11c60c2082 service nova] Acquired lock "refresh_cache-78b78ae7-74fe-4403-be9b-229abe6a7353" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.556109] env[61978]: DEBUG nova.network.neutron [req-2b2d06e1-18f0-49b6-bc82-d3c4d3cb15c2 req-12c54683-d185-4de6-9950-cd11c60c2082 service nova] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Refreshing network info cache for port 699028fe-9214-4b16-9c8f-468ae56d50b0 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 930.557713] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 8a21e6a7-c34e-4af0-b1fd-8a501694614c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 930.812991] env[61978]: DEBUG oslo_concurrency.lockutils [req-056652cd-a455-4147-8ceb-0ad4bb1bcd9f req-299900d3-6af9-4747-87c0-7cfb841635f7 service nova] Releasing lock "refresh_cache-32bcb974-8db9-43e2-b397-b497f3a4f30c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.822678] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394836, 'name': CreateVM_Task, 'duration_secs': 0.359531} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.822844] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 930.823477] env[61978]: DEBUG oslo_concurrency.lockutils [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.823641] env[61978]: DEBUG oslo_concurrency.lockutils [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.823959] env[61978]: DEBUG oslo_concurrency.lockutils [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 930.824218] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4094a333-f6f5-4b93-afca-411e3819f1d7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.828705] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Waiting for the task: (returnval){ [ 930.828705] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]528b3883-8de2-cb8e-56aa-473840d30646" [ 930.828705] env[61978]: _type = "Task" [ 930.828705] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.836273] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528b3883-8de2-cb8e-56aa-473840d30646, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.969368] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': task-1394837, 'name': Rename_Task, 'duration_secs': 0.210722} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.969743] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 930.970049] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56fddcce-bab2-4c06-ae5c-83de569405e9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.977719] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Waiting for the task: (returnval){ [ 930.977719] env[61978]: value = "task-1394838" [ 930.977719] env[61978]: _type = "Task" [ 930.977719] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.986373] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': task-1394838, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.062418] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 931.325672] env[61978]: DEBUG nova.network.neutron [req-2b2d06e1-18f0-49b6-bc82-d3c4d3cb15c2 req-12c54683-d185-4de6-9950-cd11c60c2082 service nova] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Updated VIF entry in instance network info cache for port 699028fe-9214-4b16-9c8f-468ae56d50b0. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 931.326166] env[61978]: DEBUG nova.network.neutron [req-2b2d06e1-18f0-49b6-bc82-d3c4d3cb15c2 req-12c54683-d185-4de6-9950-cd11c60c2082 service nova] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Updating instance_info_cache with network_info: [{"id": "699028fe-9214-4b16-9c8f-468ae56d50b0", "address": "fa:16:3e:ba:b4:fa", "network": {"id": "0106eca5-56c3-4566-b7fd-0fb456a9ffff", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-752118827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eef91afc00664fdb9cb1ba727a29de2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap699028fe-92", "ovs_interfaceid": "699028fe-9214-4b16-9c8f-468ae56d50b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.340120] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528b3883-8de2-cb8e-56aa-473840d30646, 'name': SearchDatastore_Task, 'duration_secs': 0.040335} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.340973] env[61978]: DEBUG oslo_concurrency.lockutils [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.341212] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 931.341470] env[61978]: DEBUG oslo_concurrency.lockutils [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.341596] env[61978]: DEBUG oslo_concurrency.lockutils [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.341771] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 931.342256] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-084317f1-fc50-4fbe-ac81-0235b2807476 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.351240] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 931.351439] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 931.352096] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3bace45-7c9e-4c66-b075-8aa7b87f68a6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.358027] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Waiting for the task: (returnval){ [ 931.358027] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]521a48ca-86e3-883f-0c00-58e19f999ee8" [ 931.358027] env[61978]: _type = "Task" [ 931.358027] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.365433] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]521a48ca-86e3-883f-0c00-58e19f999ee8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.490737] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': task-1394838, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.565889] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 931.831259] env[61978]: DEBUG oslo_concurrency.lockutils [req-2b2d06e1-18f0-49b6-bc82-d3c4d3cb15c2 req-12c54683-d185-4de6-9950-cd11c60c2082 service nova] Releasing lock "refresh_cache-78b78ae7-74fe-4403-be9b-229abe6a7353" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.831523] env[61978]: DEBUG nova.compute.manager [req-2b2d06e1-18f0-49b6-bc82-d3c4d3cb15c2 req-12c54683-d185-4de6-9950-cd11c60c2082 service nova] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Received event network-changed-9b850a07-34a8-4e1d-afff-7650895b0238 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 931.831695] env[61978]: DEBUG nova.compute.manager [req-2b2d06e1-18f0-49b6-bc82-d3c4d3cb15c2 req-12c54683-d185-4de6-9950-cd11c60c2082 service nova] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Refreshing instance network info cache due to event network-changed-9b850a07-34a8-4e1d-afff-7650895b0238. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 931.831908] env[61978]: DEBUG oslo_concurrency.lockutils [req-2b2d06e1-18f0-49b6-bc82-d3c4d3cb15c2 req-12c54683-d185-4de6-9950-cd11c60c2082 service nova] Acquiring lock "refresh_cache-e30d4a9f-1d75-453c-9552-2a0fbd4aa87d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.832062] env[61978]: DEBUG oslo_concurrency.lockutils [req-2b2d06e1-18f0-49b6-bc82-d3c4d3cb15c2 req-12c54683-d185-4de6-9950-cd11c60c2082 service nova] Acquired lock "refresh_cache-e30d4a9f-1d75-453c-9552-2a0fbd4aa87d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.832229] env[61978]: DEBUG nova.network.neutron [req-2b2d06e1-18f0-49b6-bc82-d3c4d3cb15c2 req-12c54683-d185-4de6-9950-cd11c60c2082 service nova] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Refreshing network info cache for port 9b850a07-34a8-4e1d-afff-7650895b0238 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 931.867873] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]521a48ca-86e3-883f-0c00-58e19f999ee8, 'name': SearchDatastore_Task, 'duration_secs': 0.019005} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.868752] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60641580-2e8e-43ba-89ad-448a30e035a1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.873989] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Waiting for the task: (returnval){ [ 931.873989] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5296c46d-3aa0-40c6-fcfa-84e14c4aa7dd" [ 931.873989] env[61978]: _type = "Task" [ 931.873989] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.882185] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5296c46d-3aa0-40c6-fcfa-84e14c4aa7dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.988692] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': task-1394838, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.068833] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance bdfdd685-e440-4f53-b6c4-2ee2f06acba8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 932.392066] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5296c46d-3aa0-40c6-fcfa-84e14c4aa7dd, 'name': SearchDatastore_Task, 'duration_secs': 0.01015} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.392435] env[61978]: DEBUG oslo_concurrency.lockutils [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.392776] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 78b78ae7-74fe-4403-be9b-229abe6a7353/78b78ae7-74fe-4403-be9b-229abe6a7353.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 932.393138] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f5232550-9741-45a9-90ae-4d96c8d123bb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.402252] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Waiting for the task: (returnval){ [ 932.402252] env[61978]: value = "task-1394839" [ 932.402252] env[61978]: _type = "Task" [ 932.402252] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.411460] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': task-1394839, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.489902] env[61978]: DEBUG oslo_vmware.api [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': task-1394838, 'name': PowerOnVM_Task, 'duration_secs': 1.042615} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.490554] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 932.490717] env[61978]: INFO nova.compute.manager [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Took 9.50 seconds to spawn the instance on the hypervisor. [ 932.490899] env[61978]: DEBUG nova.compute.manager [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 932.493968] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40fcd172-16c7-40f8-b517-7adfdb600190 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.573596] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance f1001633-e4e5-4de1-8a6b-cf653e43d821 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 932.573959] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 932.574189] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3840MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 932.709774] env[61978]: DEBUG nova.network.neutron [req-2b2d06e1-18f0-49b6-bc82-d3c4d3cb15c2 req-12c54683-d185-4de6-9950-cd11c60c2082 service nova] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Updated VIF entry in instance network info cache for port 9b850a07-34a8-4e1d-afff-7650895b0238. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 932.710275] env[61978]: DEBUG nova.network.neutron [req-2b2d06e1-18f0-49b6-bc82-d3c4d3cb15c2 req-12c54683-d185-4de6-9950-cd11c60c2082 service nova] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Updating instance_info_cache with network_info: [{"id": "9b850a07-34a8-4e1d-afff-7650895b0238", "address": "fa:16:3e:a8:d3:69", "network": {"id": "48011c8f-c34e-428d-a391-5540b4d1900a", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-115035202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eece5991af2241b5a2e30c69894be228", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b850a07-34", "ovs_interfaceid": "9b850a07-34a8-4e1d-afff-7650895b0238", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.913713] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': task-1394839, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505036} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.914045] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 78b78ae7-74fe-4403-be9b-229abe6a7353/78b78ae7-74fe-4403-be9b-229abe6a7353.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 932.914204] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 932.916651] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf9ce45f-a63a-4510-9302-9952f8efbf74 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.923976] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Waiting for the task: (returnval){ [ 932.923976] env[61978]: value = "task-1394840" [ 932.923976] env[61978]: _type = "Task" [ 932.923976] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.932920] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': task-1394840, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.013076] env[61978]: INFO nova.compute.manager [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Took 38.97 seconds to build instance. 
[ 933.066768] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b03c44-bbec-4c7d-874b-5e34f9ad828d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.075120] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586a9ea1-e52c-44a6-89d6-64fdec0b0c07 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.104341] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07165cbb-fa2e-4de4-9011-1778d45c9152 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.111768] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c98c339-6174-4adb-8628-535d34cf649a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.124878] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.214202] env[61978]: DEBUG oslo_concurrency.lockutils [req-2b2d06e1-18f0-49b6-bc82-d3c4d3cb15c2 req-12c54683-d185-4de6-9950-cd11c60c2082 service nova] Releasing lock "refresh_cache-e30d4a9f-1d75-453c-9552-2a0fbd4aa87d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.437510] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': task-1394840, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078176} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.437610] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 933.438422] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e93343d-872c-49a3-9a58-f27cbe148ed4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.462622] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 78b78ae7-74fe-4403-be9b-229abe6a7353/78b78ae7-74fe-4403-be9b-229abe6a7353.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 933.462949] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b8b7f56-d4b5-4ec4-9d41-caa4a7a7d3be {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.484459] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Waiting for the task: (returnval){ [ 933.484459] env[61978]: value = "task-1394841" [ 933.484459] env[61978]: _type = "Task" [ 933.484459] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.494100] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': task-1394841, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.514959] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a1dc1d5-ca3a-4ddd-b250-ba1a494e1e83 tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Lock "b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.629233] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 933.994914] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': task-1394841, 'name': ReconfigVM_Task, 'duration_secs': 0.332171} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.995591] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 78b78ae7-74fe-4403-be9b-229abe6a7353/78b78ae7-74fe-4403-be9b-229abe6a7353.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 933.996251] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6284fba7-f912-4b5d-bf09-dd78cfcd9668 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.004957] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Waiting for the task: (returnval){ [ 934.004957] env[61978]: value = "task-1394842" [ 934.004957] env[61978]: _type = "Task" [ 934.004957] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.014624] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': task-1394842, 'name': Rename_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.019152] env[61978]: DEBUG nova.compute.manager [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 934.134442] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 934.134708] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.188s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.135018] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.926s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.137032] env[61978]: INFO nova.compute.claims [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 934.430905] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Acquiring lock "b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.431377] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Lock "b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.431640] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Acquiring lock "b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.431839] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Lock 
"b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.432064] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Lock "b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.436947] env[61978]: INFO nova.compute.manager [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Terminating instance [ 934.439037] env[61978]: DEBUG nova.compute.manager [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 934.439338] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 934.440825] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed4acde0-b240-4d87-ba15-5c18c247a348 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.449661] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 934.449939] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c8c71f39-dcaa-4d7f-aa29-9347f5ea5544 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.459333] env[61978]: DEBUG oslo_vmware.api [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Waiting for the task: (returnval){ [ 934.459333] env[61978]: value = "task-1394843" [ 934.459333] env[61978]: _type = "Task" [ 934.459333] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.468643] env[61978]: DEBUG oslo_vmware.api [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': task-1394843, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.515854] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': task-1394842, 'name': Rename_Task, 'duration_secs': 0.145432} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.516182] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 934.516452] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31092675-588a-43a5-9274-017a293b2dfd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.526252] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Waiting for the task: (returnval){ [ 934.526252] env[61978]: value = "task-1394844" [ 934.526252] env[61978]: _type = "Task" [ 934.526252] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.538736] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': task-1394844, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.544376] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.970158] env[61978]: DEBUG oslo_vmware.api [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': task-1394843, 'name': PowerOffVM_Task, 'duration_secs': 0.291058} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.970558] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 934.970753] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 934.971047] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3cc1e782-02ba-4e97-ba0e-ba7f3ffd4a1a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.042966] env[61978]: DEBUG oslo_vmware.api [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': task-1394844, 'name': PowerOnVM_Task, 'duration_secs': 0.473984} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.043385] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 935.043685] env[61978]: INFO nova.compute.manager [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Took 7.05 seconds to spawn the instance on the hypervisor. 
[ 935.043960] env[61978]: DEBUG nova.compute.manager [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 935.045240] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49aa5a9d-2eb9-449a-8085-bec384441433 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.050516] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 935.050808] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 935.051123] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Deleting the datastore file [datastore2] b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 935.051914] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5b38a152-a244-4897-b672-6f93efb1fe8d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.067251] env[61978]: DEBUG oslo_vmware.api [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Waiting for the task: (returnval){ [ 935.067251] env[61978]: value = "task-1394846" [ 935.067251] env[61978]: _type = "Task" [ 935.067251] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.078657] env[61978]: DEBUG oslo_vmware.api [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': task-1394846, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.140175] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 935.140317] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 935.140445] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 935.565680] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd922941-9529-456c-ad56-17b75a27a931 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.573335] env[61978]: INFO nova.compute.manager [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Took 37.63 seconds to build instance. [ 935.580655] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6127dd83-5dbb-49fe-87c1-b177a2d2d9df {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.583991] env[61978]: DEBUG oslo_vmware.api [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Task: {'id': task-1394846, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.230026} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.584293] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 935.584541] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 935.584741] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 935.584915] env[61978]: INFO nova.compute.manager [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Took 1.15 seconds to destroy the instance on the hypervisor. [ 935.585157] env[61978]: DEBUG oslo.service.loopingcall [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 935.585659] env[61978]: DEBUG nova.compute.manager [-] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 935.585758] env[61978]: DEBUG nova.network.neutron [-] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 935.612721] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca118704-6791-4f51-9489-d7ea1aa7635f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.621619] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db650b6c-6432-4bd1-bc1b-b51cecdcacb9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.637570] env[61978]: DEBUG nova.compute.provider_tree [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 935.646938] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Skipping network cache update for instance because it is being deleted. {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 935.647111] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Skipping network cache update for instance because it is Building. 
{{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 935.664229] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "refresh_cache-9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.664431] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquired lock "refresh_cache-9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.664605] env[61978]: DEBUG nova.network.neutron [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Forcefully refreshing network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 935.664734] env[61978]: DEBUG nova.objects.instance [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lazy-loading 'info_cache' on Instance uuid 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 935.895088] env[61978]: DEBUG nova.compute.manager [req-9d121936-73e8-4617-8211-f6ae301c8307 req-f437c750-a898-4277-ad36-6b3bb62abb9f service nova] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Received event network-vif-deleted-cff12603-dd53-4dec-ad6d-70278b3ac575 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 935.895331] env[61978]: INFO nova.compute.manager [req-9d121936-73e8-4617-8211-f6ae301c8307 req-f437c750-a898-4277-ad36-6b3bb62abb9f service nova] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Neutron deleted interface cff12603-dd53-4dec-ad6d-70278b3ac575; detaching it from the instance and deleting it from the info cache [ 935.895507] env[61978]: DEBUG nova.network.neutron [req-9d121936-73e8-4617-8211-f6ae301c8307 req-f437c750-a898-4277-ad36-6b3bb62abb9f service nova] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.075794] env[61978]: DEBUG oslo_concurrency.lockutils [None req-88e638fb-8252-4e2c-8da8-947b23a4caf9 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Lock "78b78ae7-74fe-4403-be9b-229abe6a7353" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.741s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.141736] env[61978]: DEBUG nova.scheduler.client.report [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 936.367171] env[61978]: DEBUG nova.network.neutron [-] 
[instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.397415] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3e79089a-9291-4e06-9c61-92397bb8a7c3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.408220] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a8d8acc-60d9-4f3f-939a-bb6053925b03 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.442058] env[61978]: DEBUG nova.compute.manager [req-9d121936-73e8-4617-8211-f6ae301c8307 req-f437c750-a898-4277-ad36-6b3bb62abb9f service nova] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Detach interface failed, port_id=cff12603-dd53-4dec-ad6d-70278b3ac575, reason: Instance b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 936.580409] env[61978]: DEBUG nova.compute.manager [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 936.646092] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.511s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.646361] env[61978]: DEBUG nova.compute.manager [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 936.648942] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.758s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.651726] env[61978]: INFO nova.compute.claims [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 936.674420] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d8d54540-8f52-4c05-855e-b2756a7cd690 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Acquiring lock "78b78ae7-74fe-4403-be9b-229abe6a7353" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.674772] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d8d54540-8f52-4c05-855e-b2756a7cd690 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Lock "78b78ae7-74fe-4403-be9b-229abe6a7353" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.675019] env[61978]: INFO nova.compute.manager [None req-d8d54540-8f52-4c05-855e-b2756a7cd690 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Rebooting instance [ 936.699695] env[61978]: DEBUG nova.network.neutron [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 936.870517] env[61978]: INFO nova.compute.manager [-] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Took 1.28 seconds to deallocate network for instance. [ 937.116386] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.156192] env[61978]: DEBUG nova.compute.utils [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 937.159430] env[61978]: DEBUG nova.compute.manager [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 937.159600] env[61978]: DEBUG nova.network.neutron [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 937.201100] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d8d54540-8f52-4c05-855e-b2756a7cd690 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Acquiring lock "refresh_cache-78b78ae7-74fe-4403-be9b-229abe6a7353" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.201327] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d8d54540-8f52-4c05-855e-b2756a7cd690 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Acquired lock "refresh_cache-78b78ae7-74fe-4403-be9b-229abe6a7353" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.201570] env[61978]: DEBUG nova.network.neutron [None req-d8d54540-8f52-4c05-855e-b2756a7cd690 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 937.228053] env[61978]: DEBUG nova.policy [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '65cc77a6782d42dd80d174df20fee70a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df79d3305e464a6b83f18497a2464140', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 937.305200] env[61978]: DEBUG nova.network.neutron [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.377716] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.660861] env[61978]: DEBUG nova.compute.manager [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 937.680757] env[61978]: DEBUG nova.network.neutron [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Successfully created port: 4bd8d0bd-32e6-47a0-9308-f8aebe253aa4 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 937.807627] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Releasing lock "refresh_cache-9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.807845] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Updated the network info_cache for instance {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 937.808050] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 937.808304] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 937.808499] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 937.808691] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 937.808878] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 937.809080] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 937.809266] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 937.809363] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 937.952009] env[61978]: DEBUG nova.network.neutron [None req-d8d54540-8f52-4c05-855e-b2756a7cd690 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Updating instance_info_cache with network_info: [{"id": "699028fe-9214-4b16-9c8f-468ae56d50b0", "address": "fa:16:3e:ba:b4:fa", "network": {"id": "0106eca5-56c3-4566-b7fd-0fb456a9ffff", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-752118827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eef91afc00664fdb9cb1ba727a29de2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap699028fe-92", "ovs_interfaceid": "699028fe-9214-4b16-9c8f-468ae56d50b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.175384] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f13875d-6a30-4281-8db8-d64201404f0a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.183942] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6905713-6cdb-4635-8953-d3f88b46c7d8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.217459] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32391663-9908-4ffa-a1e9-05709313bbe5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.227033] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b072e547-e644-487c-8394-0d885ed0a272 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.242171] env[61978]: DEBUG nova.compute.provider_tree [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 938.316892] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.455246] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d8d54540-8f52-4c05-855e-b2756a7cd690 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Releasing lock "refresh_cache-78b78ae7-74fe-4403-be9b-229abe6a7353" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.457426] env[61978]: DEBUG nova.compute.manager [None req-d8d54540-8f52-4c05-855e-b2756a7cd690 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 938.458263] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-587c9ec9-a9ef-499b-9ff8-7fc08bdc09ca {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.672493] env[61978]: DEBUG nova.compute.manager [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 938.700851] env[61978]: DEBUG nova.virt.hardware [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 938.701111] env[61978]: DEBUG nova.virt.hardware [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 938.701273] env[61978]: DEBUG nova.virt.hardware [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 938.701531] env[61978]: DEBUG nova.virt.hardware [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 938.701597] env[61978]: DEBUG nova.virt.hardware [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 938.701743] env[61978]: DEBUG nova.virt.hardware [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 938.701949] env[61978]: DEBUG nova.virt.hardware [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 938.702128] env[61978]: DEBUG nova.virt.hardware [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 938.702298] env[61978]: DEBUG nova.virt.hardware [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 938.702483] env[61978]: DEBUG nova.virt.hardware [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 938.702659] env[61978]: DEBUG nova.virt.hardware [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 938.703540] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937476d0-3df5-4bbb-b3fa-70603877bba3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.711940] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdfccfd2-1180-49c4-958f-7b9154f8f520 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.745039] env[61978]: DEBUG nova.scheduler.client.report [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 939.250706] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.601s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.251025] env[61978]: DEBUG nova.compute.manager [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 939.256800] env[61978]: DEBUG oslo_concurrency.lockutils [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.749s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.256800] env[61978]: DEBUG oslo_concurrency.lockutils [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.260274] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a1697a46-49fd-4cde-b8a0-4a294ff7bd92 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 28.987s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.308614] env[61978]: INFO nova.scheduler.client.report [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Deleted allocations for instance 3ddf7322-5504-408f-af6c-af73fb1c4286 [ 939.383702] env[61978]: DEBUG nova.compute.manager [req-2b683267-d8e7-4972-bdb9-f623e23c01d9 req-c00726fb-304a-45f3-8551-104c9cd2ecfa service nova] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Received event network-vif-plugged-4bd8d0bd-32e6-47a0-9308-f8aebe253aa4 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 939.383932] env[61978]: DEBUG oslo_concurrency.lockutils [req-2b683267-d8e7-4972-bdb9-f623e23c01d9 req-c00726fb-304a-45f3-8551-104c9cd2ecfa service nova] Acquiring lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.384149] env[61978]: DEBUG oslo_concurrency.lockutils [req-2b683267-d8e7-4972-bdb9-f623e23c01d9 req-c00726fb-304a-45f3-8551-104c9cd2ecfa service nova] Lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.384662] env[61978]: DEBUG oslo_concurrency.lockutils [req-2b683267-d8e7-4972-bdb9-f623e23c01d9 req-c00726fb-304a-45f3-8551-104c9cd2ecfa service nova] Lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.384662] env[61978]: DEBUG nova.compute.manager [req-2b683267-d8e7-4972-bdb9-f623e23c01d9 req-c00726fb-304a-45f3-8551-104c9cd2ecfa service nova] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] No waiting events found dispatching network-vif-plugged-4bd8d0bd-32e6-47a0-9308-f8aebe253aa4 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 939.384793] env[61978]: WARNING nova.compute.manager [req-2b683267-d8e7-4972-bdb9-f623e23c01d9 req-c00726fb-304a-45f3-8551-104c9cd2ecfa service nova] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Received unexpected event network-vif-plugged-4bd8d0bd-32e6-47a0-9308-f8aebe253aa4 for instance with vm_state building and task_state spawning. [ 939.478021] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-255034a8-b7cf-4c6a-8a5c-4373bdc3e35e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.487606] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d54540-8f52-4c05-855e-b2756a7cd690 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Doing hard reboot of VM {{(pid=61978) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 939.488991] env[61978]: DEBUG nova.network.neutron [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Successfully updated port: 4bd8d0bd-32e6-47a0-9308-f8aebe253aa4 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 939.490857] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-b062612f-aacf-4787-af02-1da4797de7ff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.504801] env[61978]: DEBUG oslo_vmware.api [None req-d8d54540-8f52-4c05-855e-b2756a7cd690 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Waiting for the task: (returnval){ [ 939.504801] env[61978]: value = "task-1394847" [ 939.504801] env[61978]: _type = "Task" [ 939.504801] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.514327] env[61978]: DEBUG oslo_vmware.api [None req-d8d54540-8f52-4c05-855e-b2756a7cd690 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': task-1394847, 'name': ResetVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.766300] env[61978]: DEBUG nova.compute.utils [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 939.767944] env[61978]: DEBUG nova.compute.manager [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 939.768126] env[61978]: DEBUG nova.network.neutron [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 939.821485] env[61978]: DEBUG oslo_concurrency.lockutils [None req-131caa15-5d17-4f62-a5b8-d53f9332844c tempest-ImagesNegativeTestJSON-293820273 tempest-ImagesNegativeTestJSON-293820273-project-member] Lock "3ddf7322-5504-408f-af6c-af73fb1c4286" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.822s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.824239] env[61978]: DEBUG nova.policy [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '394d03fc54234c369ad2e1255eee9c82', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c31ffdd4e70d40ecbbb56777f9422a52', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 939.998055] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "refresh_cache-f3c837fb-be7e-40a6-aae4-7f213c62ab2c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.998237] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquired lock "refresh_cache-f3c837fb-be7e-40a6-aae4-7f213c62ab2c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.998424] env[61978]: DEBUG nova.network.neutron [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 940.017215] env[61978]: DEBUG oslo_vmware.api [None req-d8d54540-8f52-4c05-855e-b2756a7cd690 tempest-InstanceActionsTestJSON-1122201354 
tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': task-1394847, 'name': ResetVM_Task, 'duration_secs': 0.097679} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.017581] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d54540-8f52-4c05-855e-b2756a7cd690 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Did hard reboot of VM {{(pid=61978) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 940.017698] env[61978]: DEBUG nova.compute.manager [None req-d8d54540-8f52-4c05-855e-b2756a7cd690 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 940.018860] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8804ab84-fed6-4eda-aabf-d9854d8cd374 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.214933] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc566f74-4db9-4eab-882f-69f2c327fec7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.223340] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb71b5d-ef73-40d7-93f3-8af42b596c1a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.256557] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02de88f0-df3f-4d31-b351-791ff72e43b1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.266279] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2bdeab9-9e9b-416d-8f19-b08f02c5f4cc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.271310] env[61978]: DEBUG nova.compute.manager [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 940.283693] env[61978]: DEBUG nova.compute.provider_tree [None req-a1697a46-49fd-4cde-b8a0-4a294ff7bd92 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 940.292968] env[61978]: DEBUG nova.network.neutron [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Successfully created port: 7851dad1-d943-463b-82c3-1a83ddc35c79 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 940.529558] env[61978]: DEBUG nova.network.neutron [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 940.533976] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d8d54540-8f52-4c05-855e-b2756a7cd690 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Lock "78b78ae7-74fe-4403-be9b-229abe6a7353" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.859s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.667456] env[61978]: DEBUG nova.network.neutron [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Updating instance_info_cache with network_info: [{"id": "4bd8d0bd-32e6-47a0-9308-f8aebe253aa4", "address": "fa:16:3e:88:b3:ad", "network": {"id": "4a12b89f-0910-460a-8694-c424a051b6c6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-917593776-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df79d3305e464a6b83f18497a2464140", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bd8d0bd-32", "ovs_interfaceid": "4bd8d0bd-32e6-47a0-9308-f8aebe253aa4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.786679] env[61978]: DEBUG nova.scheduler.client.report [None req-a1697a46-49fd-4cde-b8a0-4a294ff7bd92 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 941.169991] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Releasing lock "refresh_cache-f3c837fb-be7e-40a6-aae4-7f213c62ab2c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.170349] env[61978]: DEBUG nova.compute.manager [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Instance network_info: |[{"id": "4bd8d0bd-32e6-47a0-9308-f8aebe253aa4", "address": "fa:16:3e:88:b3:ad", "network": {"id": "4a12b89f-0910-460a-8694-c424a051b6c6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-917593776-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df79d3305e464a6b83f18497a2464140", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bd8d0bd-32", "ovs_interfaceid": "4bd8d0bd-32e6-47a0-9308-f8aebe253aa4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 941.170759] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:b3:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4bd8d0bd-32e6-47a0-9308-f8aebe253aa4', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 941.178253] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Creating folder: Project (df79d3305e464a6b83f18497a2464140). Parent ref: group-v295764. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 941.178902] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-61d64169-27b0-4003-9ccd-858bb3d64fee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.191691] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Created folder: Project (df79d3305e464a6b83f18497a2464140) in parent group-v295764. [ 941.191937] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Creating folder: Instances. Parent ref: group-v295850. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 941.192207] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ae583e6f-64a9-4f62-9a65-96869e3c1f53 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.203289] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Created folder: Instances in parent group-v295850. [ 941.203547] env[61978]: DEBUG oslo.service.loopingcall [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 941.203761] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 941.203972] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-71279257-8517-47b1-9e5b-bcda4f7a41b3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.225048] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 941.225048] env[61978]: value = "task-1394850" [ 941.225048] env[61978]: _type = "Task" [ 941.225048] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.233479] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394850, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.281590] env[61978]: DEBUG nova.compute.manager [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 941.318902] env[61978]: DEBUG nova.virt.hardware [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 941.319395] env[61978]: DEBUG nova.virt.hardware [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 941.319728] env[61978]: DEBUG nova.virt.hardware [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 941.320120] env[61978]: DEBUG nova.virt.hardware [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 941.320405] env[61978]: DEBUG nova.virt.hardware [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 941.320683] env[61978]: DEBUG nova.virt.hardware [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 941.321056] env[61978]: DEBUG nova.virt.hardware [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 941.321356] env[61978]: DEBUG nova.virt.hardware [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 941.321821] env[61978]: DEBUG nova.virt.hardware [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] 
Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 941.322192] env[61978]: DEBUG nova.virt.hardware [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 941.322646] env[61978]: DEBUG nova.virt.hardware [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 941.327247] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4534f4f4-7360-4325-b994-29f824dd74b3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.338557] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b331f98-bae3-43fa-af33-3b59fde2646c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.404510] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Acquiring lock "78b78ae7-74fe-4403-be9b-229abe6a7353" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.404814] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Lock "78b78ae7-74fe-4403-be9b-229abe6a7353" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.405076] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Acquiring lock "78b78ae7-74fe-4403-be9b-229abe6a7353-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.405280] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Lock "78b78ae7-74fe-4403-be9b-229abe6a7353-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.405509] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Lock "78b78ae7-74fe-4403-be9b-229abe6a7353-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.409214] env[61978]: INFO nova.compute.manager [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Terminating instance [ 941.410691] env[61978]: DEBUG nova.compute.manager [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 941.410952] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 941.411866] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f28ff6d-e0fc-4b18-aa69-8d2cb848474e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.422969] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 941.423738] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-60bd7839-27c1-4fa2-9eaa-6f97297d0562 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.431719] env[61978]: DEBUG oslo_vmware.api [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Waiting for the task: (returnval){ [ 941.431719] env[61978]: value = "task-1394851" [ 941.431719] env[61978]: _type = "Task" [ 941.431719] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.444743] env[61978]: DEBUG oslo_vmware.api [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': task-1394851, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.498990] env[61978]: DEBUG nova.compute.manager [req-734092e9-e191-4484-aef2-035480c66705 req-3ef6daaa-503f-4400-985e-f9ee0f92870b service nova] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Received event network-changed-4bd8d0bd-32e6-47a0-9308-f8aebe253aa4 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 941.499210] env[61978]: DEBUG nova.compute.manager [req-734092e9-e191-4484-aef2-035480c66705 req-3ef6daaa-503f-4400-985e-f9ee0f92870b service nova] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Refreshing instance network info cache due to event network-changed-4bd8d0bd-32e6-47a0-9308-f8aebe253aa4. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 941.499422] env[61978]: DEBUG oslo_concurrency.lockutils [req-734092e9-e191-4484-aef2-035480c66705 req-3ef6daaa-503f-4400-985e-f9ee0f92870b service nova] Acquiring lock "refresh_cache-f3c837fb-be7e-40a6-aae4-7f213c62ab2c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.499565] env[61978]: DEBUG oslo_concurrency.lockutils [req-734092e9-e191-4484-aef2-035480c66705 req-3ef6daaa-503f-4400-985e-f9ee0f92870b service nova] Acquired lock "refresh_cache-f3c837fb-be7e-40a6-aae4-7f213c62ab2c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.499720] env[61978]: DEBUG nova.network.neutron [req-734092e9-e191-4484-aef2-035480c66705 req-3ef6daaa-503f-4400-985e-f9ee0f92870b service nova] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Refreshing network info cache for port 4bd8d0bd-32e6-47a0-9308-f8aebe253aa4 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 941.736221] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394850, 'name': CreateVM_Task, 'duration_secs': 0.374818} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.736637] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 941.740019] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.740019] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.740019] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 941.740019] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7806df23-fbcc-4214-87f5-1877d75e36f3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.744830] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 941.744830] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a9868e-e5bf-089e-1596-94ec0ab49c63" [ 941.744830] env[61978]: _type = "Task" [ 941.744830] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.754135] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a9868e-e5bf-089e-1596-94ec0ab49c63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.800161] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a1697a46-49fd-4cde-b8a0-4a294ff7bd92 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.538s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.801116] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.469s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.802815] env[61978]: INFO nova.compute.claims [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 941.942606] env[61978]: DEBUG oslo_vmware.api [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': task-1394851, 'name': PowerOffVM_Task, 'duration_secs': 0.287186} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.942872] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 941.943052] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 941.943318] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1c77d02-465f-43be-b6cc-843cac8e8013 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.041025] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 942.041025] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 942.041025] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Deleting the datastore file [datastore1] 78b78ae7-74fe-4403-be9b-229abe6a7353 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 942.041025] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dc66e3b2-d84c-4e73-a964-e71f67b35910 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.046031] env[61978]: DEBUG oslo_vmware.api [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Waiting for the task: (returnval){ [ 942.046031] env[61978]: value = "task-1394853" [ 942.046031] env[61978]: _type = "Task" [ 942.046031] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.054653] env[61978]: DEBUG oslo_vmware.api [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': task-1394853, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.258454] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a9868e-e5bf-089e-1596-94ec0ab49c63, 'name': SearchDatastore_Task, 'duration_secs': 0.011292} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.258845] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.259162] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 942.259471] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.259679] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.259918] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 942.260249] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-01bfdd7f-3e50-42f2-acc2-4098a8a8aba7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.270360] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 942.270682] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 942.271631] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-836955ce-7b03-47d8-9362-ed0d02b0f330 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.277706] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 942.277706] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]524c9cab-8ac4-548f-11a4-1a420d062312" [ 942.277706] env[61978]: _type = "Task" [ 942.277706] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.286535] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]524c9cab-8ac4-548f-11a4-1a420d062312, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.373018] env[61978]: INFO nova.scheduler.client.report [None req-a1697a46-49fd-4cde-b8a0-4a294ff7bd92 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Deleted allocation for migration a5c81d5c-f908-418a-92b4-c9cc3e2002af [ 942.432209] env[61978]: DEBUG nova.network.neutron [req-734092e9-e191-4484-aef2-035480c66705 req-3ef6daaa-503f-4400-985e-f9ee0f92870b service nova] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Updated VIF entry in instance network info cache for port 4bd8d0bd-32e6-47a0-9308-f8aebe253aa4. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 942.432597] env[61978]: DEBUG nova.network.neutron [req-734092e9-e191-4484-aef2-035480c66705 req-3ef6daaa-503f-4400-985e-f9ee0f92870b service nova] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Updating instance_info_cache with network_info: [{"id": "4bd8d0bd-32e6-47a0-9308-f8aebe253aa4", "address": "fa:16:3e:88:b3:ad", "network": {"id": "4a12b89f-0910-460a-8694-c424a051b6c6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-917593776-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df79d3305e464a6b83f18497a2464140", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bd8d0bd-32", "ovs_interfaceid": "4bd8d0bd-32e6-47a0-9308-f8aebe253aa4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.472804] env[61978]: DEBUG nova.network.neutron [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Successfully updated port: 7851dad1-d943-463b-82c3-1a83ddc35c79 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 942.560830] env[61978]: DEBUG oslo_vmware.api [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Task: {'id': task-1394853, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.412104} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.562313] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 942.562313] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 942.566267] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 942.566505] env[61978]: INFO nova.compute.manager [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Took 1.16 seconds to destroy the instance on the hypervisor. [ 942.566732] env[61978]: DEBUG oslo.service.loopingcall [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 942.566940] env[61978]: DEBUG nova.compute.manager [-] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 942.567049] env[61978]: DEBUG nova.network.neutron [-] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 942.793096] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]524c9cab-8ac4-548f-11a4-1a420d062312, 'name': SearchDatastore_Task, 'duration_secs': 0.013997} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.796365] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24a7b9ce-478d-4a37-868d-d2f03be898af {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.803864] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 942.803864] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52804dd0-f719-55ec-caf2-6f49c83254c6" [ 942.803864] env[61978]: _type = "Task" [ 942.803864] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.817730] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52804dd0-f719-55ec-caf2-6f49c83254c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.881024] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a1697a46-49fd-4cde-b8a0-4a294ff7bd92 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "b26a4784-698d-477a-8db7-58156899d231" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 35.904s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.935409] env[61978]: DEBUG oslo_concurrency.lockutils [req-734092e9-e191-4484-aef2-035480c66705 req-3ef6daaa-503f-4400-985e-f9ee0f92870b service nova] Releasing lock "refresh_cache-f3c837fb-be7e-40a6-aae4-7f213c62ab2c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.979757] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "refresh_cache-f22e097d-f1a5-414a-82cc-ab455db876c7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.979757] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquired lock "refresh_cache-f22e097d-f1a5-414a-82cc-ab455db876c7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.980194] env[61978]: DEBUG nova.network.neutron [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 943.313969] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52804dd0-f719-55ec-caf2-6f49c83254c6, 'name': SearchDatastore_Task, 'duration_secs': 0.013783} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.316539] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.316767] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] f3c837fb-be7e-40a6-aae4-7f213c62ab2c/f3c837fb-be7e-40a6-aae4-7f213c62ab2c.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 943.317208] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a2a8f95a-e374-40a5-bc28-461bda024f14 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.325875] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 943.325875] env[61978]: value = "task-1394854" [ 943.325875] env[61978]: _type = "Task" [ 943.325875] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.337195] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1394854, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.343856] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88492f3a-2aad-44ca-b6a3-ad1bf810a69c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.351627] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1443f304-080a-494e-8f8d-120c595bbe32 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.383336] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14de806a-2e42-46a5-b144-5b0fe7554004 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.392339] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65236b95-3788-4a23-bafa-82cceb4ba1e3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.407035] env[61978]: DEBUG nova.compute.provider_tree [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.441771] env[61978]: DEBUG nova.network.neutron [-] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.550147] env[61978]: DEBUG nova.compute.manager [req-a5abff45-5841-4790-9170-fac3c00cbdbd req-72709215-8309-4cfe-b29d-6563c06997ce service nova] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Received event network-vif-plugged-7851dad1-d943-463b-82c3-1a83ddc35c79 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 943.550147] env[61978]: DEBUG oslo_concurrency.lockutils [req-a5abff45-5841-4790-9170-fac3c00cbdbd req-72709215-8309-4cfe-b29d-6563c06997ce service nova] Acquiring lock "f22e097d-f1a5-414a-82cc-ab455db876c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.550147] env[61978]: DEBUG oslo_concurrency.lockutils [req-a5abff45-5841-4790-9170-fac3c00cbdbd req-72709215-8309-4cfe-b29d-6563c06997ce service nova] Lock "f22e097d-f1a5-414a-82cc-ab455db876c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.550147] env[61978]: DEBUG oslo_concurrency.lockutils [req-a5abff45-5841-4790-9170-fac3c00cbdbd req-72709215-8309-4cfe-b29d-6563c06997ce service nova] Lock "f22e097d-f1a5-414a-82cc-ab455db876c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.550597] env[61978]: DEBUG nova.compute.manager [req-a5abff45-5841-4790-9170-fac3c00cbdbd req-72709215-8309-4cfe-b29d-6563c06997ce service nova] 
[instance: f22e097d-f1a5-414a-82cc-ab455db876c7] No waiting events found dispatching network-vif-plugged-7851dad1-d943-463b-82c3-1a83ddc35c79 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 943.551243] env[61978]: WARNING nova.compute.manager [req-a5abff45-5841-4790-9170-fac3c00cbdbd req-72709215-8309-4cfe-b29d-6563c06997ce service nova] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Received unexpected event network-vif-plugged-7851dad1-d943-463b-82c3-1a83ddc35c79 for instance with vm_state building and task_state spawning. [ 943.551555] env[61978]: DEBUG nova.compute.manager [req-a5abff45-5841-4790-9170-fac3c00cbdbd req-72709215-8309-4cfe-b29d-6563c06997ce service nova] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Received event network-changed-7851dad1-d943-463b-82c3-1a83ddc35c79 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 943.551887] env[61978]: DEBUG nova.compute.manager [req-a5abff45-5841-4790-9170-fac3c00cbdbd req-72709215-8309-4cfe-b29d-6563c06997ce service nova] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Refreshing instance network info cache due to event network-changed-7851dad1-d943-463b-82c3-1a83ddc35c79. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 943.552400] env[61978]: DEBUG oslo_concurrency.lockutils [req-a5abff45-5841-4790-9170-fac3c00cbdbd req-72709215-8309-4cfe-b29d-6563c06997ce service nova] Acquiring lock "refresh_cache-f22e097d-f1a5-414a-82cc-ab455db876c7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.573012] env[61978]: DEBUG nova.network.neutron [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 943.836527] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1394854, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.913041] env[61978]: DEBUG nova.scheduler.client.report [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 943.944690] env[61978]: INFO nova.compute.manager [-] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Took 1.38 seconds to deallocate network for instance. 
[ 943.990379] env[61978]: DEBUG nova.network.neutron [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Updating instance_info_cache with network_info: [{"id": "7851dad1-d943-463b-82c3-1a83ddc35c79", "address": "fa:16:3e:52:8a:f8", "network": {"id": "c8b3d2ab-6847-4747-9638-70d0aefd63da", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1816910772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c31ffdd4e70d40ecbbb56777f9422a52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7851dad1-d9", "ovs_interfaceid": "7851dad1-d943-463b-82c3-1a83ddc35c79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.343925] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1394854, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515343} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.344164] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] f3c837fb-be7e-40a6-aae4-7f213c62ab2c/f3c837fb-be7e-40a6-aae4-7f213c62ab2c.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 944.344428] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 944.344657] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4aad26b4-2133-4e7d-b4a3-224d0b3089cb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.352162] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 944.352162] env[61978]: value = "task-1394855" [ 944.352162] env[61978]: _type = "Task" [ 944.352162] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.364880] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1394855, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.421210] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.620s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.421758] env[61978]: DEBUG nova.compute.manager [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 944.424507] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.353s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.424871] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.427063] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.695s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.427261] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.429436] env[61978]: DEBUG oslo_concurrency.lockutils [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.231s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.430880] env[61978]: INFO 
nova.compute.claims [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 944.455825] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.460923] env[61978]: INFO nova.scheduler.client.report [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Deleted allocations for instance 2f5b06f6-7178-4fdf-93b6-65477f020898 [ 944.472333] env[61978]: INFO nova.scheduler.client.report [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Deleted allocations for instance d2614f71-3026-41d4-ae04-eaede9b5ead5 [ 944.492721] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Releasing lock "refresh_cache-f22e097d-f1a5-414a-82cc-ab455db876c7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.493479] env[61978]: DEBUG nova.compute.manager [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Instance network_info: |[{"id": "7851dad1-d943-463b-82c3-1a83ddc35c79", "address": "fa:16:3e:52:8a:f8", "network": {"id": "c8b3d2ab-6847-4747-9638-70d0aefd63da", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1816910772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c31ffdd4e70d40ecbbb56777f9422a52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7851dad1-d9", "ovs_interfaceid": "7851dad1-d943-463b-82c3-1a83ddc35c79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 944.493802] env[61978]: DEBUG oslo_concurrency.lockutils [req-a5abff45-5841-4790-9170-fac3c00cbdbd req-72709215-8309-4cfe-b29d-6563c06997ce service nova] Acquired lock "refresh_cache-f22e097d-f1a5-414a-82cc-ab455db876c7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.494068] env[61978]: DEBUG 
nova.network.neutron [req-a5abff45-5841-4790-9170-fac3c00cbdbd req-72709215-8309-4cfe-b29d-6563c06997ce service nova] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Refreshing network info cache for port 7851dad1-d943-463b-82c3-1a83ddc35c79 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 944.495772] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:8a:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '411f389f-4e4f-4450-891e-38944cac6135', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7851dad1-d943-463b-82c3-1a83ddc35c79', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 944.504128] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Creating folder: Project (c31ffdd4e70d40ecbbb56777f9422a52). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 944.507602] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b520c24c-b347-4169-996f-2baa56fd274b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.523390] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Created folder: Project (c31ffdd4e70d40ecbbb56777f9422a52) in parent group-v295764. [ 944.527358] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Creating folder: Instances. Parent ref: group-v295853. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 944.528299] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-934b93b2-c6ec-4104-b61d-8087069031bf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.552778] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Created folder: Instances in parent group-v295853. [ 944.553053] env[61978]: DEBUG oslo.service.loopingcall [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 944.553310] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 944.553552] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4665541e-682a-4546-8337-34419a87d767 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.576372] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 944.576372] env[61978]: value = "task-1394858" [ 944.576372] env[61978]: _type = "Task" [ 944.576372] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.586164] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394858, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.864258] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1394855, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.280342} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.864610] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 944.865556] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe52f29-34d6-4faf-bdcd-59148b6f070c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.892909] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] f3c837fb-be7e-40a6-aae4-7f213c62ab2c/f3c837fb-be7e-40a6-aae4-7f213c62ab2c.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 944.893234] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc11687c-07d9-4445-9177-9f01384897b9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.918434] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 944.918434] env[61978]: value = "task-1394859" [ 944.918434] env[61978]: _type = "Task" [ 944.918434] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.929158] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1394859, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.938207] env[61978]: DEBUG nova.compute.utils [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 944.940154] env[61978]: DEBUG nova.compute.manager [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 944.940154] env[61978]: DEBUG nova.network.neutron [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 944.968764] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6943753f-d5fb-40d3-ad04-1707fa67c411 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713 tempest-FloatingIPsAssociationNegativeTestJSON-2067676713-project-member] Lock "2f5b06f6-7178-4fdf-93b6-65477f020898" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.372s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.984556] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5538cf3c-d66b-4724-ac77-0c77508cbe9d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Lock "d2614f71-3026-41d4-ae04-eaede9b5ead5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.428s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.004251] env[61978]: DEBUG nova.policy [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a21e624965c4a20a540bd4fba8773b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '27ccd1f7b852490a8d92e2c0e714e7d5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 945.090425] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394858, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.275336] env[61978]: DEBUG nova.network.neutron [req-a5abff45-5841-4790-9170-fac3c00cbdbd req-72709215-8309-4cfe-b29d-6563c06997ce service nova] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Updated VIF entry in instance network info cache for port 7851dad1-d943-463b-82c3-1a83ddc35c79. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 945.275675] env[61978]: DEBUG nova.network.neutron [req-a5abff45-5841-4790-9170-fac3c00cbdbd req-72709215-8309-4cfe-b29d-6563c06997ce service nova] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Updating instance_info_cache with network_info: [{"id": "7851dad1-d943-463b-82c3-1a83ddc35c79", "address": "fa:16:3e:52:8a:f8", "network": {"id": "c8b3d2ab-6847-4747-9638-70d0aefd63da", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1816910772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c31ffdd4e70d40ecbbb56777f9422a52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7851dad1-d9", "ovs_interfaceid": "7851dad1-d943-463b-82c3-1a83ddc35c79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.430358] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1394859, 'name': ReconfigVM_Task, 'duration_secs': 0.498243} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.432012] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Reconfigured VM instance instance-0000001f to attach disk [datastore2] f3c837fb-be7e-40a6-aae4-7f213c62ab2c/f3c837fb-be7e-40a6-aae4-7f213c62ab2c.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 945.435580] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-76b593d9-83b4-4f7c-a06e-7194a35d776d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.438472] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "f930ab49-c215-4b2e-92b1-21c0d52a70eb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.438802] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "f930ab49-c215-4b2e-92b1-21c0d52a70eb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.446461] env[61978]: DEBUG nova.compute.manager [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 945.453021] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 945.453021] env[61978]: value = "task-1394860" [ 945.453021] env[61978]: _type = "Task" [ 945.453021] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.468755] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1394860, 'name': Rename_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.589818] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394858, 'name': CreateVM_Task, 'duration_secs': 0.664062} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.592935] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 945.594898] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.594898] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.594898] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 945.594898] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f840cec-4cd4-4de7-b085-c5b506737491 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.601115] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 945.601115] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52eedcdd-228a-6b0e-2ff7-df9b46a21ae8" [ 945.601115] env[61978]: _type = "Task" [ 945.601115] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.614935] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52eedcdd-228a-6b0e-2ff7-df9b46a21ae8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.668162] env[61978]: DEBUG nova.network.neutron [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Successfully created port: 5ad44f35-6aec-4586-a2e9-9f486fa4fd57 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 945.780085] env[61978]: DEBUG oslo_concurrency.lockutils [req-a5abff45-5841-4790-9170-fac3c00cbdbd req-72709215-8309-4cfe-b29d-6563c06997ce service nova] Releasing lock "refresh_cache-f22e097d-f1a5-414a-82cc-ab455db876c7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.780085] env[61978]: DEBUG nova.compute.manager [req-a5abff45-5841-4790-9170-fac3c00cbdbd req-72709215-8309-4cfe-b29d-6563c06997ce service nova] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Received event network-vif-deleted-699028fe-9214-4b16-9c8f-468ae56d50b0 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 945.861191] env[61978]: DEBUG oslo_concurrency.lockutils [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquiring lock "e9e2deb5-5bf9-4b57-832f-9928d3cda162" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.861191] env[61978]: DEBUG oslo_concurrency.lockutils [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Lock "e9e2deb5-5bf9-4b57-832f-9928d3cda162" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.861191] env[61978]: DEBUG oslo_concurrency.lockutils [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquiring lock "e9e2deb5-5bf9-4b57-832f-9928d3cda162-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.861191] env[61978]: DEBUG oslo_concurrency.lockutils [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Lock "e9e2deb5-5bf9-4b57-832f-9928d3cda162-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.861436] env[61978]: DEBUG oslo_concurrency.lockutils [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Lock "e9e2deb5-5bf9-4b57-832f-9928d3cda162-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
945.863135] env[61978]: INFO nova.compute.manager [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Terminating instance [ 945.865873] env[61978]: DEBUG nova.compute.manager [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 945.866151] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 945.866992] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06ff1904-f9b6-40a2-b5d7-be2d0457ffc0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.880089] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 945.881586] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f370ac52-4da6-41d9-8898-e2175b97f580 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.893238] env[61978]: DEBUG oslo_vmware.api [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for the task: (returnval){ [ 945.893238] env[61978]: value = "task-1394861" [ 945.893238] env[61978]: _type = "Task" [ 945.893238] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.909883] env[61978]: DEBUG oslo_vmware.api [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394861, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.972149] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1394860, 'name': Rename_Task, 'duration_secs': 0.290348} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.973807] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 945.973807] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d17cff30-f77e-48b0-a1ab-fd08b6b73bb6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.987901] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 945.987901] env[61978]: value = "task-1394862" [ 945.987901] env[61978]: _type = "Task" [ 945.987901] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.999491] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846f8871-a365-4309-8129-48c0fa3b22cf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.014373] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-486b35dc-7ce5-4817-8575-4592d0d12d88 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.017462] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1394862, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.052430] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef96d50-3e13-42a7-83b6-4d08f30986ac {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.061450] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8586f18f-f1e9-4aa7-a4d3-29c96f439144 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.079231] env[61978]: DEBUG nova.compute.provider_tree [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 946.115587] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52eedcdd-228a-6b0e-2ff7-df9b46a21ae8, 'name': SearchDatastore_Task, 'duration_secs': 0.012767} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.115872] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.116117] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 946.116360] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 946.116509] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.116687] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 946.116985] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f05b752-3337-4eeb-b05e-afbe020d3ca3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.138754] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 946.139131] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 946.139824] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f10a4974-8ec1-4e97-9eee-6b86240e644a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.147199] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 946.147199] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52eaaf1c-3a2c-017a-aa9c-e83ae8513112" [ 946.147199] env[61978]: _type = "Task" [ 946.147199] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.159483] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52eaaf1c-3a2c-017a-aa9c-e83ae8513112, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.407292] env[61978]: DEBUG oslo_vmware.api [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394861, 'name': PowerOffVM_Task, 'duration_secs': 0.261062} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.407561] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 946.407724] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 946.407976] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea52730f-1394-482c-9e53-91eed45e0b1f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.464848] env[61978]: DEBUG nova.compute.manager [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 946.485175] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 946.485533] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 946.485703] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Deleting the datastore file [datastore2] e9e2deb5-5bf9-4b57-832f-9928d3cda162 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 946.486030] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9bb2e748-1093-47b0-ac86-6f39e0b4d085 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.497823] env[61978]: DEBUG oslo_vmware.api [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for the task: (returnval){ [ 946.497823] env[61978]: value = "task-1394864" [ 946.497823] env[61978]: _type = "Task" [ 946.497823] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.500191] env[61978]: DEBUG nova.virt.hardware [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 946.500438] env[61978]: DEBUG nova.virt.hardware [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 946.500664] env[61978]: DEBUG nova.virt.hardware [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 946.501651] env[61978]: DEBUG nova.virt.hardware [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 946.501651] env[61978]: DEBUG nova.virt.hardware [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 946.501651] env[61978]: DEBUG nova.virt.hardware [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 946.501651] env[61978]: DEBUG nova.virt.hardware [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 946.501906] env[61978]: DEBUG nova.virt.hardware [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 946.501906] env[61978]: DEBUG nova.virt.hardware [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 946.502037] env[61978]: DEBUG nova.virt.hardware [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 946.502200] env[61978]: DEBUG nova.virt.hardware [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 946.507112] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e03b03b-54be-4413-8640-8223ecdb656a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.509946] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1394862, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.521882] env[61978]: DEBUG oslo_vmware.api [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394864, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.523328] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ab0b9d-badf-496f-85ec-b777bc9fa258 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.583967] env[61978]: DEBUG nova.scheduler.client.report [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 946.660624] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52eaaf1c-3a2c-017a-aa9c-e83ae8513112, 'name': SearchDatastore_Task, 'duration_secs': 0.012334} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.661464] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b352d80-eacf-416e-ab25-e638870f7a99 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.668676] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 946.668676] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d8cc1a-ee5b-b7df-2e0a-3afffbc92066" [ 946.668676] env[61978]: _type = "Task" [ 946.668676] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.678181] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d8cc1a-ee5b-b7df-2e0a-3afffbc92066, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.001556] env[61978]: DEBUG oslo_vmware.api [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1394862, 'name': PowerOnVM_Task, 'duration_secs': 0.605665} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.001845] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 947.002061] env[61978]: INFO nova.compute.manager [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Took 8.33 seconds to spawn the instance on the hypervisor. [ 947.002249] env[61978]: DEBUG nova.compute.manager [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 947.003215] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148d6d37-58b8-4fa2-8470-1165cfa78d36 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.020885] env[61978]: DEBUG oslo_vmware.api [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Task: {'id': task-1394864, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.389594} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.021057] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 947.021581] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 947.021581] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 947.021581] env[61978]: INFO nova.compute.manager [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Took 1.16 seconds to destroy the instance on the hypervisor. [ 947.021796] env[61978]: DEBUG oslo.service.loopingcall [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 947.023037] env[61978]: DEBUG nova.compute.manager [-] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 947.023037] env[61978]: DEBUG nova.network.neutron [-] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 947.090228] env[61978]: DEBUG oslo_concurrency.lockutils [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.661s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.090677] env[61978]: DEBUG nova.compute.manager [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 947.093261] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.100s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.095218] env[61978]: INFO nova.compute.claims [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 947.179474] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d8cc1a-ee5b-b7df-2e0a-3afffbc92066, 'name': SearchDatastore_Task, 'duration_secs': 0.020519} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.179749] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.180016] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] f22e097d-f1a5-414a-82cc-ab455db876c7/f22e097d-f1a5-414a-82cc-ab455db876c7.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 947.180283] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c1d5859-1704-485b-835b-625949fbbc13 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.190103] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 947.190103] env[61978]: value = "task-1394865" [ 947.190103] env[61978]: _type = "Task" [ 947.190103] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.204702] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394865, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.476508] env[61978]: DEBUG nova.compute.manager [req-e2e79a09-4dee-4ba9-9dcd-993d30fe8c6b req-6e587b30-9249-4e69-b824-16efc968bb02 service nova] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Received event network-vif-deleted-a1a8eaeb-61c3-4540-b925-e5516a063dbd {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 947.476711] env[61978]: INFO nova.compute.manager [req-e2e79a09-4dee-4ba9-9dcd-993d30fe8c6b req-6e587b30-9249-4e69-b824-16efc968bb02 service nova] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Neutron deleted interface a1a8eaeb-61c3-4540-b925-e5516a063dbd; detaching it from the instance and deleting it from the info cache [ 947.476933] env[61978]: DEBUG nova.network.neutron [req-e2e79a09-4dee-4ba9-9dcd-993d30fe8c6b req-6e587b30-9249-4e69-b824-16efc968bb02 service nova] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.524768] env[61978]: INFO nova.compute.manager [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Took 42.34 seconds to build instance. [ 947.573374] env[61978]: DEBUG nova.compute.manager [req-ebca66c8-da8d-4dc6-ab00-bd567b03518f req-f0dad4b4-d725-41f0-90ec-6678eb1b8182 service nova] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Received event network-vif-plugged-5ad44f35-6aec-4586-a2e9-9f486fa4fd57 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 947.573641] env[61978]: DEBUG oslo_concurrency.lockutils [req-ebca66c8-da8d-4dc6-ab00-bd567b03518f req-f0dad4b4-d725-41f0-90ec-6678eb1b8182 service nova] Acquiring lock "b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.574114] env[61978]: DEBUG oslo_concurrency.lockutils [req-ebca66c8-da8d-4dc6-ab00-bd567b03518f req-f0dad4b4-d725-41f0-90ec-6678eb1b8182 service nova] Lock "b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.576677] env[61978]: DEBUG oslo_concurrency.lockutils [req-ebca66c8-da8d-4dc6-ab00-bd567b03518f req-f0dad4b4-d725-41f0-90ec-6678eb1b8182 service nova] Lock "b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.576914] env[61978]: DEBUG nova.compute.manager [req-ebca66c8-da8d-4dc6-ab00-bd567b03518f req-f0dad4b4-d725-41f0-90ec-6678eb1b8182 service nova] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] No waiting events found dispatching network-vif-plugged-5ad44f35-6aec-4586-a2e9-9f486fa4fd57 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 947.577112] env[61978]: WARNING nova.compute.manager [req-ebca66c8-da8d-4dc6-ab00-bd567b03518f req-f0dad4b4-d725-41f0-90ec-6678eb1b8182 service nova] [instance: 
b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Received unexpected event network-vif-plugged-5ad44f35-6aec-4586-a2e9-9f486fa4fd57 for instance with vm_state building and task_state spawning. [ 947.603020] env[61978]: DEBUG nova.compute.utils [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 947.605132] env[61978]: DEBUG nova.compute.manager [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 947.605504] env[61978]: DEBUG nova.network.neutron [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 947.660234] env[61978]: DEBUG nova.policy [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd4a65218b82048c893b100da20d5be67', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ffbb7de670794a08afde272d93ec36a3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 947.700205] env[61978]: DEBUG nova.network.neutron [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Successfully updated port: 5ad44f35-6aec-4586-a2e9-9f486fa4fd57 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 947.711119] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394865, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.844590] env[61978]: DEBUG nova.network.neutron [-] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.986511] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0d66c9b8-eb3e-40d4-8501-b78abfff70e4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.000508] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-497828c1-88d9-4add-8633-60515ae4ca96 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.027638] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a7f27b5e-e19c-4396-8cc4-8e1274f2226f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.422s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.046795] env[61978]: DEBUG nova.compute.manager [req-e2e79a09-4dee-4ba9-9dcd-993d30fe8c6b req-6e587b30-9249-4e69-b824-16efc968bb02 service nova] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Detach interface failed, port_id=a1a8eaeb-61c3-4540-b925-e5516a063dbd, reason: Instance e9e2deb5-5bf9-4b57-832f-9928d3cda162 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 948.105912] env[61978]: DEBUG nova.compute.manager [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 948.203150] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394865, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.827225} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.203447] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] f22e097d-f1a5-414a-82cc-ab455db876c7/f22e097d-f1a5-414a-82cc-ab455db876c7.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 948.203702] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 948.206954] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e3289bcd-0396-459a-a8e1-a8d2e72173cb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.211348] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "refresh_cache-b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.211348] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquired lock "refresh_cache-b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.211348] env[61978]: DEBUG nova.network.neutron [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 948.215997] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 948.215997] env[61978]: value = "task-1394866" [ 948.215997] env[61978]: _type = "Task" [ 948.215997] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.228410] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394866, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.238418] env[61978]: DEBUG nova.network.neutron [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Successfully created port: 3dba37f4-66d3-4de6-b597-7ea0b2a0221c {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 948.347416] env[61978]: INFO nova.compute.manager [-] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Took 1.33 seconds to deallocate network for instance. [ 948.551341] env[61978]: DEBUG nova.compute.manager [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 948.638886] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-089f0c3a-c124-4616-b93b-341089545d60 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.647487] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02e83603-54ac-4f64-89da-5eba6d35da72 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.683520] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f339d6ab-cdab-4d15-9765-e247da8bc218 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.693447] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-534e25b5-da9f-4ec4-905c-be0f1a1e929f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.709018] env[61978]: DEBUG nova.compute.provider_tree [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 948.727105] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394866, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069225} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.727367] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 948.728563] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ebeac80-3e33-441f-8307-be229fd71a8b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.753241] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] f22e097d-f1a5-414a-82cc-ab455db876c7/f22e097d-f1a5-414a-82cc-ab455db876c7.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 948.753544] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8812da81-9d33-4837-be99-0aea437b8f37 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.768427] env[61978]: DEBUG nova.network.neutron [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 948.778122] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 948.778122] env[61978]: value = "task-1394867" [ 948.778122] env[61978]: _type = "Task" [ 948.778122] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.790026] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394867, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.861989] env[61978]: DEBUG oslo_concurrency.lockutils [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.073906] env[61978]: DEBUG oslo_concurrency.lockutils [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.118500] env[61978]: DEBUG nova.compute.manager [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 949.149538] env[61978]: DEBUG nova.virt.hardware [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 949.149767] env[61978]: DEBUG nova.virt.hardware [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 949.149924] env[61978]: DEBUG nova.virt.hardware [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 949.150115] env[61978]: DEBUG nova.virt.hardware [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 949.150261] env[61978]: DEBUG nova.virt.hardware [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Image pref 0:0:0 {{(pid=61978) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 949.150454] env[61978]: DEBUG nova.virt.hardware [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 949.150615] env[61978]: DEBUG nova.virt.hardware [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 949.150770] env[61978]: DEBUG nova.virt.hardware [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 949.150936] env[61978]: DEBUG nova.virt.hardware [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 949.151455] env[61978]: DEBUG nova.virt.hardware [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 949.151667] env[61978]: DEBUG nova.virt.hardware [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 949.152678] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90bd7a6-b44b-4cb5-8ea3-3bede3cfa009 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.164429] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f84634-c67d-4d36-ae4e-9b96a226dcab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.211561] env[61978]: DEBUG nova.scheduler.client.report [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 949.237510] env[61978]: DEBUG 
nova.network.neutron [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Updating instance_info_cache with network_info: [{"id": "5ad44f35-6aec-4586-a2e9-9f486fa4fd57", "address": "fa:16:3e:b6:da:f2", "network": {"id": "2f12a4fc-0daf-4101-919d-9b9372dcbb2f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1016739189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27ccd1f7b852490a8d92e2c0e714e7d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ad44f35-6a", "ovs_interfaceid": "5ad44f35-6aec-4586-a2e9-9f486fa4fd57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.290770] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394867, 'name': ReconfigVM_Task, 'duration_secs': 0.288237} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.291267] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Reconfigured VM instance instance-00000020 to attach disk [datastore2] f22e097d-f1a5-414a-82cc-ab455db876c7/f22e097d-f1a5-414a-82cc-ab455db876c7.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 949.291901] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bd86f328-1915-4fe9-9842-82c97e3902da {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.300739] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 949.300739] env[61978]: value = "task-1394868" [ 949.300739] env[61978]: _type = "Task" [ 949.300739] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.318106] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394868, 'name': Rename_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.647311] env[61978]: DEBUG nova.compute.manager [req-8a0c1337-1b21-4116-96ae-3e5a13a7e9c7 req-7eda0a82-5052-40c9-94cc-a95dfd52ae44 service nova] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Received event network-changed-5ad44f35-6aec-4586-a2e9-9f486fa4fd57 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 949.647311] env[61978]: DEBUG nova.compute.manager [req-8a0c1337-1b21-4116-96ae-3e5a13a7e9c7 req-7eda0a82-5052-40c9-94cc-a95dfd52ae44 service nova] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Refreshing instance network info cache due to event network-changed-5ad44f35-6aec-4586-a2e9-9f486fa4fd57. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 949.647311] env[61978]: DEBUG oslo_concurrency.lockutils [req-8a0c1337-1b21-4116-96ae-3e5a13a7e9c7 req-7eda0a82-5052-40c9-94cc-a95dfd52ae44 service nova] Acquiring lock "refresh_cache-b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.691128] env[61978]: DEBUG nova.compute.manager [req-1c2ad9b3-5a9e-4d8f-8706-4f25f1584ae5 req-20673623-78cd-4c6f-b6ff-a71843a0de4e service nova] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Received event network-changed-4bd8d0bd-32e6-47a0-9308-f8aebe253aa4 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 949.691371] env[61978]: DEBUG nova.compute.manager [req-1c2ad9b3-5a9e-4d8f-8706-4f25f1584ae5 req-20673623-78cd-4c6f-b6ff-a71843a0de4e service nova] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Refreshing instance network info cache due to event network-changed-4bd8d0bd-32e6-47a0-9308-f8aebe253aa4. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 949.693601] env[61978]: DEBUG oslo_concurrency.lockutils [req-1c2ad9b3-5a9e-4d8f-8706-4f25f1584ae5 req-20673623-78cd-4c6f-b6ff-a71843a0de4e service nova] Acquiring lock "refresh_cache-f3c837fb-be7e-40a6-aae4-7f213c62ab2c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.693813] env[61978]: DEBUG oslo_concurrency.lockutils [req-1c2ad9b3-5a9e-4d8f-8706-4f25f1584ae5 req-20673623-78cd-4c6f-b6ff-a71843a0de4e service nova] Acquired lock "refresh_cache-f3c837fb-be7e-40a6-aae4-7f213c62ab2c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.693940] env[61978]: DEBUG nova.network.neutron [req-1c2ad9b3-5a9e-4d8f-8706-4f25f1584ae5 req-20673623-78cd-4c6f-b6ff-a71843a0de4e service nova] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Refreshing network info cache for port 4bd8d0bd-32e6-47a0-9308-f8aebe253aa4 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 949.718591] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.625s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.719420] env[61978]: DEBUG nova.compute.manager [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 949.724326] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.469s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.724624] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.002s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.727251] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.646s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.727476] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.729605] env[61978]: DEBUG oslo_concurrency.lockutils [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.784s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.731154] env[61978]: INFO nova.compute.claims [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 949.743608] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Releasing lock "refresh_cache-b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.743608] env[61978]: DEBUG nova.compute.manager [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Instance network_info: |[{"id": "5ad44f35-6aec-4586-a2e9-9f486fa4fd57", "address": "fa:16:3e:b6:da:f2", "network": {"id": "2f12a4fc-0daf-4101-919d-9b9372dcbb2f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1016739189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27ccd1f7b852490a8d92e2c0e714e7d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ad44f35-6a", "ovs_interfaceid": "5ad44f35-6aec-4586-a2e9-9f486fa4fd57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 949.743815] env[61978]: DEBUG oslo_concurrency.lockutils [req-8a0c1337-1b21-4116-96ae-3e5a13a7e9c7 req-7eda0a82-5052-40c9-94cc-a95dfd52ae44 service nova] Acquired lock "refresh_cache-b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.743815] env[61978]: DEBUG nova.network.neutron [req-8a0c1337-1b21-4116-96ae-3e5a13a7e9c7 req-7eda0a82-5052-40c9-94cc-a95dfd52ae44 service nova] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Refreshing network info cache for port 5ad44f35-6aec-4586-a2e9-9f486fa4fd57 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 949.746828] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:da:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf5bfbae-a882-4d34-be33-b31e274b3077', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5ad44f35-6aec-4586-a2e9-9f486fa4fd57', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 949.756387] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Creating folder: Project (27ccd1f7b852490a8d92e2c0e714e7d5). Parent ref: group-v295764. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 949.756387] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5bf5763f-d54c-4b63-ab20-c66510cf6c7c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.768865] env[61978]: INFO nova.scheduler.client.report [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Deleted allocations for instance a4d45835-f065-445f-bcb6-d1b01d545cb0 [ 949.774301] env[61978]: INFO nova.scheduler.client.report [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Deleted allocations for instance bb0c149c-920e-47c4-a960-47b2fb443431 [ 949.781646] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Created folder: Project (27ccd1f7b852490a8d92e2c0e714e7d5) in parent group-v295764. [ 949.781845] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Creating folder: Instances. Parent ref: group-v295856. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 949.782301] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-264d22ab-22ef-4d8a-987e-f59d1f5a2c9f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.796144] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Created folder: Instances in parent group-v295856. [ 949.796552] env[61978]: DEBUG oslo.service.loopingcall [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 949.796903] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 949.797135] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4a08c7a8-8b24-46a5-aabe-41fe8a28876f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.825690] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394868, 'name': Rename_Task, 'duration_secs': 0.156883} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.825690] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 949.825690] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 949.825690] env[61978]: value = "task-1394871" [ 949.825690] env[61978]: _type = "Task" [ 949.825690] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.826480] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c179d560-1a34-4700-befa-41626c05a0de {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.839104] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394871, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.840822] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 949.840822] env[61978]: value = "task-1394872" [ 949.840822] env[61978]: _type = "Task" [ 949.840822] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.850068] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394872, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.136145] env[61978]: DEBUG nova.network.neutron [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Successfully updated port: 3dba37f4-66d3-4de6-b597-7ea0b2a0221c {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 950.239875] env[61978]: DEBUG nova.compute.utils [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 950.240843] env[61978]: DEBUG nova.compute.manager [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 950.241237] env[61978]: DEBUG nova.network.neutron [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 950.285844] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c80b278-6615-4ba0-8804-0226bab978f2 tempest-ServerTagsTestJSON-1196630213 tempest-ServerTagsTestJSON-1196630213-project-member] Lock "a4d45835-f065-445f-bcb6-d1b01d545cb0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.176s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.287091] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c557bf4b-fd73-473d-9730-a0d4aa1c62bc tempest-DeleteServersAdminTestJSON-1900111298 tempest-DeleteServersAdminTestJSON-1900111298-project-member] Lock "bb0c149c-920e-47c4-a960-47b2fb443431" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.248s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.310177] env[61978]: DEBUG nova.policy [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a21e624965c4a20a540bd4fba8773b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '27ccd1f7b852490a8d92e2c0e714e7d5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 950.343251] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394871, 'name': CreateVM_Task, 'duration_secs': 0.465414} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.347237] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 950.347928] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.348407] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.348787] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 950.349848] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8222f129-4512-4e3b-9763-17a98c567368 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.360874] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394872, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.360874] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 950.360874] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a16927-f462-e196-7f9c-a058156f21c5" [ 950.360874] env[61978]: _type = "Task" [ 950.360874] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.373184] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a16927-f462-e196-7f9c-a058156f21c5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.640567] env[61978]: DEBUG oslo_concurrency.lockutils [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Acquiring lock "refresh_cache-dd686727-fc33-4dc4-b386-aabec27cf215" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.640773] env[61978]: DEBUG oslo_concurrency.lockutils [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Acquired lock "refresh_cache-dd686727-fc33-4dc4-b386-aabec27cf215" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.640985] env[61978]: DEBUG nova.network.neutron [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 950.738304] env[61978]: DEBUG nova.network.neutron [req-1c2ad9b3-5a9e-4d8f-8706-4f25f1584ae5 req-20673623-78cd-4c6f-b6ff-a71843a0de4e service nova] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Updated VIF entry in instance network info cache for port 4bd8d0bd-32e6-47a0-9308-f8aebe253aa4. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 950.738666] env[61978]: DEBUG nova.network.neutron [req-1c2ad9b3-5a9e-4d8f-8706-4f25f1584ae5 req-20673623-78cd-4c6f-b6ff-a71843a0de4e service nova] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Updating instance_info_cache with network_info: [{"id": "4bd8d0bd-32e6-47a0-9308-f8aebe253aa4", "address": "fa:16:3e:88:b3:ad", "network": {"id": "4a12b89f-0910-460a-8694-c424a051b6c6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-917593776-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df79d3305e464a6b83f18497a2464140", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bd8d0bd-32", "ovs_interfaceid": "4bd8d0bd-32e6-47a0-9308-f8aebe253aa4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.747178] env[61978]: DEBUG nova.compute.manager [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 950.767612] env[61978]: DEBUG nova.network.neutron [req-8a0c1337-1b21-4116-96ae-3e5a13a7e9c7 req-7eda0a82-5052-40c9-94cc-a95dfd52ae44 service nova] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Updated VIF entry in instance network info cache for port 5ad44f35-6aec-4586-a2e9-9f486fa4fd57. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 950.767954] env[61978]: DEBUG nova.network.neutron [req-8a0c1337-1b21-4116-96ae-3e5a13a7e9c7 req-7eda0a82-5052-40c9-94cc-a95dfd52ae44 service nova] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Updating instance_info_cache with network_info: [{"id": "5ad44f35-6aec-4586-a2e9-9f486fa4fd57", "address": "fa:16:3e:b6:da:f2", "network": {"id": "2f12a4fc-0daf-4101-919d-9b9372dcbb2f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1016739189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27ccd1f7b852490a8d92e2c0e714e7d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ad44f35-6a", "ovs_interfaceid": "5ad44f35-6aec-4586-a2e9-9f486fa4fd57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.857045] env[61978]: DEBUG oslo_vmware.api [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394872, 'name': PowerOnVM_Task, 'duration_secs': 0.574536} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.857374] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 950.857621] env[61978]: INFO nova.compute.manager [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Took 9.58 seconds to spawn the instance on the hypervisor. 
[ 950.857844] env[61978]: DEBUG nova.compute.manager [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 950.858739] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f092373-82a2-45c0-9079-935b794bd687 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.880677] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a16927-f462-e196-7f9c-a058156f21c5, 'name': SearchDatastore_Task, 'duration_secs': 0.023851} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.884238] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.884238] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 950.884238] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.884238] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.884369] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 950.885034] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b63bf731-c95d-42d7-83b2-d5ead06e2d1d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.899611] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e 
tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 950.899815] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 950.900665] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac485434-a7e4-4149-aca7-feac286acd30 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.907093] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 950.907093] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52860850-01f0-ae16-391b-fe1cb6c34ac7" [ 950.907093] env[61978]: _type = "Task" [ 950.907093] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.916322] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52860850-01f0-ae16-391b-fe1cb6c34ac7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.936020] env[61978]: DEBUG nova.network.neutron [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Successfully created port: f10bef80-f5ec-40ab-bb8e-c1c9973e4e66 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 951.209993] env[61978]: DEBUG nova.network.neutron [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 951.242175] env[61978]: DEBUG oslo_concurrency.lockutils [req-1c2ad9b3-5a9e-4d8f-8706-4f25f1584ae5 req-20673623-78cd-4c6f-b6ff-a71843a0de4e service nova] Releasing lock "refresh_cache-f3c837fb-be7e-40a6-aae4-7f213c62ab2c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.248512] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquiring lock "b356fc81-f857-4416-8eb0-28c66d137967" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.249639] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lock "b356fc81-f857-4416-8eb0-28c66d137967" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.277752] env[61978]: DEBUG oslo_concurrency.lockutils [req-8a0c1337-1b21-4116-96ae-3e5a13a7e9c7 req-7eda0a82-5052-40c9-94cc-a95dfd52ae44 service nova] Releasing lock "refresh_cache-b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.356482] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7220e109-eab5-435f-8072-4ed84d777474 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.371519] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420ab646-8d5f-4f99-9f49-b978b5b1b60a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.425346] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46bcd6a4-2aee-45e5-8b26-ea50703784e5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.428660] env[61978]: INFO nova.compute.manager [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Took 44.56 seconds to build instance. [ 951.439716] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52860850-01f0-ae16-391b-fe1cb6c34ac7, 'name': SearchDatastore_Task, 'duration_secs': 0.012278} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.444118] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2667751d-ddce-48e5-b23f-775e5f9d6cc9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.447604] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd639ef2-4c01-4628-ad4b-e79e349f0918 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.456346] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 951.456346] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]523e2df3-1631-6bdb-6bcb-740a26a91d22" [ 951.456346] env[61978]: _type = "Task" [ 951.456346] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.465386] env[61978]: DEBUG nova.compute.provider_tree [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 951.479638] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523e2df3-1631-6bdb-6bcb-740a26a91d22, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.495379] env[61978]: DEBUG nova.network.neutron [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Updating instance_info_cache with network_info: [{"id": "3dba37f4-66d3-4de6-b597-7ea0b2a0221c", "address": "fa:16:3e:24:ed:c0", "network": {"id": "1f707935-8b03-46ff-a533-78214155851f", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-2096832297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ffbb7de670794a08afde272d93ec36a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48512b02-ad5c-4105-ba7d-fd4775acf8e1", "external-id": "nsx-vlan-transportzone-516", "segmentation_id": 516, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dba37f4-66", "ovs_interfaceid": "3dba37f4-66d3-4de6-b597-7ea0b2a0221c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.690396] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquiring lock "cb004a19-0048-4766-af7c-0fbde867f422" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.690899] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lock "cb004a19-0048-4766-af7c-0fbde867f422" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.760040] env[61978]: DEBUG nova.compute.manager [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 951.798317] env[61978]: DEBUG nova.virt.hardware [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 951.798613] env[61978]: DEBUG nova.virt.hardware [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 951.798777] env[61978]: DEBUG nova.virt.hardware [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 951.798958] env[61978]: DEBUG nova.virt.hardware [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 951.799264] env[61978]: DEBUG nova.virt.hardware [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 951.799786] env[61978]: DEBUG nova.virt.hardware [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 951.800072] env[61978]: DEBUG nova.virt.hardware [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 951.800286] env[61978]: DEBUG nova.virt.hardware [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
951.800540] env[61978]: DEBUG nova.virt.hardware [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 951.800839] env[61978]: DEBUG nova.virt.hardware [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 951.801304] env[61978]: DEBUG nova.virt.hardware [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 951.802213] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b52155c9-5d4d-4497-aa6c-7cfa2555d1e1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.807305] env[61978]: DEBUG nova.compute.manager [req-918e3cda-783e-4fdb-a4ea-12e9e09c0e53 req-d5a8c178-4aef-461e-83da-8d7c40335842 service nova] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Received event network-vif-plugged-3dba37f4-66d3-4de6-b597-7ea0b2a0221c {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 951.807666] env[61978]: DEBUG oslo_concurrency.lockutils [req-918e3cda-783e-4fdb-a4ea-12e9e09c0e53 req-d5a8c178-4aef-461e-83da-8d7c40335842 service nova] Acquiring lock "dd686727-fc33-4dc4-b386-aabec27cf215-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.808252] env[61978]: DEBUG oslo_concurrency.lockutils [req-918e3cda-783e-4fdb-a4ea-12e9e09c0e53 req-d5a8c178-4aef-461e-83da-8d7c40335842 service nova] Lock "dd686727-fc33-4dc4-b386-aabec27cf215-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.808667] env[61978]: DEBUG oslo_concurrency.lockutils [req-918e3cda-783e-4fdb-a4ea-12e9e09c0e53 req-d5a8c178-4aef-461e-83da-8d7c40335842 service nova] Lock "dd686727-fc33-4dc4-b386-aabec27cf215-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.808998] env[61978]: DEBUG nova.compute.manager [req-918e3cda-783e-4fdb-a4ea-12e9e09c0e53 req-d5a8c178-4aef-461e-83da-8d7c40335842 service nova] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] No waiting events found dispatching network-vif-plugged-3dba37f4-66d3-4de6-b597-7ea0b2a0221c {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 951.809333] env[61978]: WARNING nova.compute.manager [req-918e3cda-783e-4fdb-a4ea-12e9e09c0e53 req-d5a8c178-4aef-461e-83da-8d7c40335842 service nova] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Received unexpected event network-vif-plugged-3dba37f4-66d3-4de6-b597-7ea0b2a0221c for instance 
with vm_state building and task_state spawning. [ 951.809511] env[61978]: DEBUG nova.compute.manager [req-918e3cda-783e-4fdb-a4ea-12e9e09c0e53 req-d5a8c178-4aef-461e-83da-8d7c40335842 service nova] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Received event network-changed-3dba37f4-66d3-4de6-b597-7ea0b2a0221c {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 951.809890] env[61978]: DEBUG nova.compute.manager [req-918e3cda-783e-4fdb-a4ea-12e9e09c0e53 req-d5a8c178-4aef-461e-83da-8d7c40335842 service nova] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Refreshing instance network info cache due to event network-changed-3dba37f4-66d3-4de6-b597-7ea0b2a0221c. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 951.809890] env[61978]: DEBUG oslo_concurrency.lockutils [req-918e3cda-783e-4fdb-a4ea-12e9e09c0e53 req-d5a8c178-4aef-461e-83da-8d7c40335842 service nova] Acquiring lock "refresh_cache-dd686727-fc33-4dc4-b386-aabec27cf215" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.817752] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90c5e59-b6ef-4b48-9f3b-d51936c48d20 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.943773] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7417c475-dd90-49f6-a4a3-b492d3cb5157 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "f22e097d-f1a5-414a-82cc-ab455db876c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.673s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.971851] env[61978]: DEBUG nova.scheduler.client.report [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 951.979850] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523e2df3-1631-6bdb-6bcb-740a26a91d22, 'name': SearchDatastore_Task, 'duration_secs': 0.031557} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.980477] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.980750] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3/b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 951.980998] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fb663a18-143f-4694-8440-a70422efb40c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.988979] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 951.988979] env[61978]: value = "task-1394873" [ 951.988979] env[61978]: _type = "Task" [ 951.988979] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.000305] env[61978]: DEBUG oslo_concurrency.lockutils [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Releasing lock "refresh_cache-dd686727-fc33-4dc4-b386-aabec27cf215" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.000592] env[61978]: DEBUG nova.compute.manager [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Instance network_info: |[{"id": "3dba37f4-66d3-4de6-b597-7ea0b2a0221c", "address": "fa:16:3e:24:ed:c0", "network": {"id": "1f707935-8b03-46ff-a533-78214155851f", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-2096832297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ffbb7de670794a08afde272d93ec36a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48512b02-ad5c-4105-ba7d-fd4775acf8e1", "external-id": "nsx-vlan-transportzone-516", "segmentation_id": 516, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dba37f4-66", "ovs_interfaceid": "3dba37f4-66d3-4de6-b597-7ea0b2a0221c", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 952.001098] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394873, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.001972] env[61978]: DEBUG oslo_concurrency.lockutils [req-918e3cda-783e-4fdb-a4ea-12e9e09c0e53 req-d5a8c178-4aef-461e-83da-8d7c40335842 service nova] Acquired lock "refresh_cache-dd686727-fc33-4dc4-b386-aabec27cf215" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.002122] env[61978]: DEBUG nova.network.neutron [req-918e3cda-783e-4fdb-a4ea-12e9e09c0e53 req-d5a8c178-4aef-461e-83da-8d7c40335842 service nova] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Refreshing network info cache for port 3dba37f4-66d3-4de6-b597-7ea0b2a0221c {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 952.003619] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:ed:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48512b02-ad5c-4105-ba7d-fd4775acf8e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3dba37f4-66d3-4de6-b597-7ea0b2a0221c', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 952.014434] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Creating folder: Project (ffbb7de670794a08afde272d93ec36a3). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 952.016727] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b52efbfa-b71d-4152-a241-697f6d6cf62f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.027549] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Created folder: Project (ffbb7de670794a08afde272d93ec36a3) in parent group-v295764. [ 952.027758] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Creating folder: Instances. Parent ref: group-v295859. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 952.027999] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-82ceb4b9-70e5-4397-af48-b7c174c7a3d4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.040512] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Created folder: Instances in parent group-v295859. [ 952.040753] env[61978]: DEBUG oslo.service.loopingcall [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 952.040952] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 952.041203] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4f3f4d2-1155-4aee-a283-8af29d8874b1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.068942] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 952.068942] env[61978]: value = "task-1394876" [ 952.068942] env[61978]: _type = "Task" [ 952.068942] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.080586] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394876, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.445151] env[61978]: DEBUG nova.compute.manager [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 952.482562] env[61978]: DEBUG oslo_concurrency.lockutils [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.753s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.483543] env[61978]: DEBUG nova.compute.manager [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 952.487590] env[61978]: DEBUG oslo_concurrency.lockutils [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.071s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.487899] env[61978]: DEBUG oslo_concurrency.lockutils [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.490296] env[61978]: DEBUG oslo_concurrency.lockutils [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.223s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.491938] env[61978]: INFO nova.compute.claims [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 952.507165] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394873, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.547230] env[61978]: INFO nova.scheduler.client.report [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Deleted allocations for instance 76dff032-a806-4910-a48b-8850b05131c1 [ 952.580736] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394876, 'name': CreateVM_Task, 'duration_secs': 0.38219} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.581069] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 952.581926] env[61978]: DEBUG oslo_concurrency.lockutils [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 952.581926] env[61978]: DEBUG oslo_concurrency.lockutils [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.582442] env[61978]: DEBUG oslo_concurrency.lockutils [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 952.582774] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c20bea65-e784-4574-9f04-e900639444cf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.587604] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for the task: (returnval){ [ 952.587604] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5243807f-d1c7-ee00-0b91-bfdfe51c6def" [ 952.587604] env[61978]: _type = "Task" [ 952.587604] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.596381] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5243807f-d1c7-ee00-0b91-bfdfe51c6def, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.688256] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e0a53708-37e9-4885-8969-38178569d74b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "f22e097d-f1a5-414a-82cc-ab455db876c7" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.688561] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e0a53708-37e9-4885-8969-38178569d74b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "f22e097d-f1a5-414a-82cc-ab455db876c7" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.688690] env[61978]: DEBUG nova.compute.manager [None req-e0a53708-37e9-4885-8969-38178569d74b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 952.689597] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5208f5d2-3adb-48c1-808b-9248e36c23df {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.701941] env[61978]: DEBUG nova.compute.manager [None req-e0a53708-37e9-4885-8969-38178569d74b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61978) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 952.702029] env[61978]: DEBUG nova.objects.instance [None req-e0a53708-37e9-4885-8969-38178569d74b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lazy-loading 'flavor' on Instance uuid f22e097d-f1a5-414a-82cc-ab455db876c7 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 952.822126] env[61978]: DEBUG nova.network.neutron [req-918e3cda-783e-4fdb-a4ea-12e9e09c0e53 req-d5a8c178-4aef-461e-83da-8d7c40335842 service nova] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Updated VIF entry in instance network info cache for port 3dba37f4-66d3-4de6-b597-7ea0b2a0221c. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 952.822126] env[61978]: DEBUG nova.network.neutron [req-918e3cda-783e-4fdb-a4ea-12e9e09c0e53 req-d5a8c178-4aef-461e-83da-8d7c40335842 service nova] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Updating instance_info_cache with network_info: [{"id": "3dba37f4-66d3-4de6-b597-7ea0b2a0221c", "address": "fa:16:3e:24:ed:c0", "network": {"id": "1f707935-8b03-46ff-a533-78214155851f", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-2096832297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ffbb7de670794a08afde272d93ec36a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48512b02-ad5c-4105-ba7d-fd4775acf8e1", "external-id": "nsx-vlan-transportzone-516", "segmentation_id": 516, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dba37f4-66", "ovs_interfaceid": "3dba37f4-66d3-4de6-b597-7ea0b2a0221c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.986846] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.997922] env[61978]: DEBUG nova.compute.utils [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 953.005749] env[61978]: DEBUG nova.compute.manager [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 953.006064] env[61978]: DEBUG nova.network.neutron [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 953.012320] env[61978]: DEBUG nova.compute.manager [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 953.025407] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394873, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.604987} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.025407] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3/b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 953.025407] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 953.025407] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-85a4bcb1-4905-43e0-9160-31c04f2324ab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.032866] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 953.032866] env[61978]: value = "task-1394877" [ 953.032866] env[61978]: _type = "Task" [ 953.032866] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.041613] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394877, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.062290] env[61978]: DEBUG oslo_concurrency.lockutils [None req-31b75948-2141-4f7f-aecd-9e638d5800c1 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "76dff032-a806-4910-a48b-8850b05131c1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.024s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.102194] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5243807f-d1c7-ee00-0b91-bfdfe51c6def, 'name': SearchDatastore_Task, 'duration_secs': 0.026169} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.102549] env[61978]: DEBUG oslo_concurrency.lockutils [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 953.102830] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 953.103219] env[61978]: DEBUG oslo_concurrency.lockutils [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.103517] env[61978]: DEBUG oslo_concurrency.lockutils [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.103818] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 953.104122] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f5706d6e-22f9-44c8-abe8-f1d97fdbba61 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.116870] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 953.116959] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 953.119027] env[61978]: DEBUG nova.policy [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a21e624965c4a20a540bd4fba8773b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '27ccd1f7b852490a8d92e2c0e714e7d5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 953.120477] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf475fcc-a7e8-4f8d-aff0-8680acb5959d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.126396] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for the task: (returnval){ [ 953.126396] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c48dbb-0de3-bd8c-b2bf-c21316478f4e" [ 953.126396] env[61978]: _type = "Task" [ 953.126396] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.137264] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c48dbb-0de3-bd8c-b2bf-c21316478f4e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.177262] env[61978]: DEBUG nova.compute.manager [req-e586b753-71e6-40b9-928b-bf9b726bdbd0 req-fc8282ec-bda4-4c54-b5bc-9effdbaec55a service nova] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Received event network-vif-plugged-f10bef80-f5ec-40ab-bb8e-c1c9973e4e66 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 953.177262] env[61978]: DEBUG oslo_concurrency.lockutils [req-e586b753-71e6-40b9-928b-bf9b726bdbd0 req-fc8282ec-bda4-4c54-b5bc-9effdbaec55a service nova] Acquiring lock "3a30ecc4-455f-49cf-98e8-d38be6a1c5a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.177262] env[61978]: DEBUG oslo_concurrency.lockutils [req-e586b753-71e6-40b9-928b-bf9b726bdbd0 req-fc8282ec-bda4-4c54-b5bc-9effdbaec55a service nova] Lock "3a30ecc4-455f-49cf-98e8-d38be6a1c5a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.177262] env[61978]: DEBUG oslo_concurrency.lockutils [req-e586b753-71e6-40b9-928b-bf9b726bdbd0 req-fc8282ec-bda4-4c54-b5bc-9effdbaec55a service nova] Lock "3a30ecc4-455f-49cf-98e8-d38be6a1c5a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.177262] env[61978]: DEBUG nova.compute.manager [req-e586b753-71e6-40b9-928b-bf9b726bdbd0 req-fc8282ec-bda4-4c54-b5bc-9effdbaec55a service nova] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] No waiting events found dispatching network-vif-plugged-f10bef80-f5ec-40ab-bb8e-c1c9973e4e66 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 953.177412] env[61978]: WARNING nova.compute.manager [req-e586b753-71e6-40b9-928b-bf9b726bdbd0 req-fc8282ec-bda4-4c54-b5bc-9effdbaec55a service nova] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Received unexpected event network-vif-plugged-f10bef80-f5ec-40ab-bb8e-c1c9973e4e66 for instance with vm_state building and task_state spawning. [ 953.211977] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0a53708-37e9-4885-8969-38178569d74b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 953.212733] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a1b97aa3-2cb3-4407-a246-4e9e66531d51 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.220024] env[61978]: DEBUG oslo_vmware.api [None req-e0a53708-37e9-4885-8969-38178569d74b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 953.220024] env[61978]: value = "task-1394878" [ 953.220024] env[61978]: _type = "Task" [ 953.220024] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.232690] env[61978]: DEBUG oslo_vmware.api [None req-e0a53708-37e9-4885-8969-38178569d74b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394878, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.259776] env[61978]: DEBUG nova.network.neutron [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Successfully updated port: f10bef80-f5ec-40ab-bb8e-c1c9973e4e66 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 953.325067] env[61978]: DEBUG oslo_concurrency.lockutils [req-918e3cda-783e-4fdb-a4ea-12e9e09c0e53 req-d5a8c178-4aef-461e-83da-8d7c40335842 service nova] Releasing lock "refresh_cache-dd686727-fc33-4dc4-b386-aabec27cf215" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 953.553733] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394877, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075874} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.554506] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 953.555096] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf13801f-e6f4-4f99-8e55-8bc83528a791 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.582412] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3/b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 953.586092] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ffff626-3522-4a7a-aa98-f4f903bd6f86 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.615633] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 953.615633] env[61978]: value = "task-1394879" [ 953.615633] env[61978]: _type = "Task" [ 953.615633] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.629753] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394879, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.630425] env[61978]: DEBUG nova.network.neutron [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Successfully created port: 35d1b15c-e867-4d4b-8d09-58369a8e74e1 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 953.650937] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c48dbb-0de3-bd8c-b2bf-c21316478f4e, 'name': SearchDatastore_Task, 'duration_secs': 0.020536} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.651597] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3223b08a-ec7c-47cf-8a79-75ba3a58095e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.661457] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for the task: (returnval){ [ 953.661457] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]522d921f-3efa-7135-0416-d6752ddcaf6c" [ 953.661457] env[61978]: _type = "Task" [ 953.661457] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.676021] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522d921f-3efa-7135-0416-d6752ddcaf6c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.730944] env[61978]: DEBUG oslo_vmware.api [None req-e0a53708-37e9-4885-8969-38178569d74b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394878, 'name': PowerOffVM_Task, 'duration_secs': 0.445904} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.731238] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0a53708-37e9-4885-8969-38178569d74b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 953.731414] env[61978]: DEBUG nova.compute.manager [None req-e0a53708-37e9-4885-8969-38178569d74b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 953.732194] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee017ff-6eb9-4896-83b9-ff4b63e4862c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.764978] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "refresh_cache-3a30ecc4-455f-49cf-98e8-d38be6a1c5a5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.765134] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquired lock "refresh_cache-3a30ecc4-455f-49cf-98e8-d38be6a1c5a5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.765288] env[61978]: DEBUG nova.network.neutron [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 953.942933] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Acquiring lock "2c1ce021-255f-454d-ba0e-c85380f3e973" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.945488] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Lock "2c1ce021-255f-454d-ba0e-c85380f3e973" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.024041] env[61978]: DEBUG nova.compute.manager [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 954.052515] env[61978]: DEBUG nova.virt.hardware [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:05Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 954.052765] env[61978]: DEBUG nova.virt.hardware [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 954.052919] env[61978]: DEBUG nova.virt.hardware [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 954.053161] env[61978]: DEBUG nova.virt.hardware [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 954.055742] env[61978]: DEBUG nova.virt.hardware [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 954.055742] env[61978]: DEBUG nova.virt.hardware [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 954.055742] env[61978]: DEBUG nova.virt.hardware [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 954.055742] env[61978]: DEBUG nova.virt.hardware [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
954.055742] env[61978]: DEBUG nova.virt.hardware [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 954.055933] env[61978]: DEBUG nova.virt.hardware [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 954.055933] env[61978]: DEBUG nova.virt.hardware [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 954.055933] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b616e0fa-a690-43af-a6eb-041baa76f349 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.067720] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eff8b91-9ae4-458d-8224-483bfc75169e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.111745] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d12cf331-c508-430e-b62c-7d349e3f3807 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.123471] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1822dd-6457-4dbe-a058-2837c622111c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.130029] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394879, 'name': ReconfigVM_Task, 'duration_secs': 0.390286} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.130387] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Reconfigured VM instance instance-00000021 to attach disk [datastore1] b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3/b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 954.131022] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3185ba14-2587-4080-8ae6-a575d7458b27 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.168062] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f34d26-6a91-4404-a08e-128d27a8f1f4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.171819] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 954.171819] env[61978]: value = "task-1394880" [ 954.171819] env[61978]: _type = "Task" [ 954.171819] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.183021] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522d921f-3efa-7135-0416-d6752ddcaf6c, 'name': SearchDatastore_Task, 'duration_secs': 0.018622} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.183021] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8db0a7b-af42-48c9-b616-1edabe70047c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.186582] env[61978]: DEBUG oslo_concurrency.lockutils [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.186870] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] dd686727-fc33-4dc4-b386-aabec27cf215/dd686727-fc33-4dc4-b386-aabec27cf215.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 954.189987] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e860ab8e-7b37-4586-994b-61412726aa6e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.192092] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394880, 'name': Rename_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.202772] env[61978]: DEBUG nova.compute.provider_tree [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 954.205391] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for the task: (returnval){ [ 954.205391] env[61978]: value = "task-1394881" [ 954.205391] env[61978]: _type = "Task" [ 954.205391] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.214122] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394881, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.247188] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e0a53708-37e9-4885-8969-38178569d74b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "f22e097d-f1a5-414a-82cc-ab455db876c7" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.558s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.333653] env[61978]: DEBUG nova.network.neutron [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 954.577243] env[61978]: DEBUG nova.network.neutron [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Updating instance_info_cache with network_info: [{"id": "f10bef80-f5ec-40ab-bb8e-c1c9973e4e66", "address": "fa:16:3e:59:23:5d", "network": {"id": "2f12a4fc-0daf-4101-919d-9b9372dcbb2f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1016739189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27ccd1f7b852490a8d92e2c0e714e7d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf10bef80-f5", "ovs_interfaceid": "f10bef80-f5ec-40ab-bb8e-c1c9973e4e66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.686573] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394880, 'name': Rename_Task, 'duration_secs': 0.136038} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.686740] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 954.687039] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-47c40807-0dd3-44af-85d1-bd0113997a3e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.695087] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 954.695087] env[61978]: value = "task-1394882" [ 954.695087] env[61978]: _type = "Task" [ 954.695087] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.706016] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394882, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.710184] env[61978]: DEBUG nova.scheduler.client.report [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 954.720973] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394881, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.729719] env[61978]: DEBUG oslo_concurrency.lockutils [None req-311e7218-989f-4716-82bb-d0d4160fe09f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "23e2a460-a59f-46b8-bd29-153d6d6a5c92" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.729719] env[61978]: DEBUG oslo_concurrency.lockutils [None req-311e7218-989f-4716-82bb-d0d4160fe09f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "23e2a460-a59f-46b8-bd29-153d6d6a5c92" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.081651] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Releasing lock "refresh_cache-3a30ecc4-455f-49cf-98e8-d38be6a1c5a5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.082141] env[61978]: DEBUG nova.compute.manager [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Instance network_info: |[{"id": "f10bef80-f5ec-40ab-bb8e-c1c9973e4e66", "address": "fa:16:3e:59:23:5d", "network": {"id": "2f12a4fc-0daf-4101-919d-9b9372dcbb2f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1016739189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27ccd1f7b852490a8d92e2c0e714e7d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf10bef80-f5", "ovs_interfaceid": "f10bef80-f5ec-40ab-bb8e-c1c9973e4e66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 955.082442] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:23:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf5bfbae-a882-4d34-be33-b31e274b3077', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f10bef80-f5ec-40ab-bb8e-c1c9973e4e66', 'vif_model': 'vmxnet3'}] 
{{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 955.094958] env[61978]: DEBUG oslo.service.loopingcall [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 955.094958] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 955.094958] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-740050a3-f3cf-413b-a306-44abfc4b998f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.122089] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 955.122089] env[61978]: value = "task-1394883" [ 955.122089] env[61978]: _type = "Task" [ 955.122089] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.138438] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394883, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.208915] env[61978]: DEBUG oslo_vmware.api [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394882, 'name': PowerOnVM_Task, 'duration_secs': 0.461237} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.215955] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 955.216466] env[61978]: INFO nova.compute.manager [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Took 8.75 seconds to spawn the instance on the hypervisor. 
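The Rename_Task / PowerOnVM_Task entries above follow the usual oslo.vmware pattern: the driver invokes an asynchronous vCenter task through the API session and then blocks on wait_for_task, which is what produces the recurring "progress is N%" and "completed successfully" poll lines in this log. A minimal sketch of that invoke-then-poll pattern is below; the host, credentials and managed-object ID are placeholders for illustration only and are not taken from this log.

    # Illustrative sketch of the invoke-then-poll pattern seen in the log above.
    # Host, credentials and the VM managed-object reference are placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed-object reference for a VM (placeholder moref value).
    vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')

    # Start the asynchronous vCenter task, then poll until it finishes;
    # wait_for_task raises if the task ends in an error state.
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task_ref)
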
[ 955.216869] env[61978]: DEBUG nova.compute.manager [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 955.218054] env[61978]: DEBUG oslo_concurrency.lockutils [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.728s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.218662] env[61978]: DEBUG nova.compute.manager [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 955.225669] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e96b66-8567-49d9-be1f-4b3daa862f41 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.234237] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.390s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.234560] env[61978]: DEBUG nova.objects.instance [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lazy-loading 'resources' on Instance uuid 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.244036] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394881, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.559848} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.245425] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] dd686727-fc33-4dc4-b386-aabec27cf215/dd686727-fc33-4dc4-b386-aabec27cf215.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 955.247776] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 955.251982] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9d2e9c09-b634-4a53-830c-3fa8118dc383 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.262447] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for the task: (returnval){ [ 955.262447] env[61978]: value = "task-1394884" [ 955.262447] env[61978]: _type = "Task" [ 955.262447] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.301155] env[61978]: DEBUG nova.network.neutron [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Successfully updated port: 35d1b15c-e867-4d4b-8d09-58369a8e74e1 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 955.506288] env[61978]: DEBUG nova.compute.manager [req-ca67b692-a6fe-404a-878a-bdec336cb88b req-ae5521df-4afe-4593-bf78-33025835fe54 service nova] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Received event network-vif-plugged-35d1b15c-e867-4d4b-8d09-58369a8e74e1 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 955.506288] env[61978]: DEBUG oslo_concurrency.lockutils [req-ca67b692-a6fe-404a-878a-bdec336cb88b req-ae5521df-4afe-4593-bf78-33025835fe54 service nova] Acquiring lock "ea1c2d74-70b4-4547-a887-78e291c3082a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.506671] env[61978]: DEBUG oslo_concurrency.lockutils [req-ca67b692-a6fe-404a-878a-bdec336cb88b req-ae5521df-4afe-4593-bf78-33025835fe54 service nova] Lock "ea1c2d74-70b4-4547-a887-78e291c3082a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.506671] env[61978]: DEBUG oslo_concurrency.lockutils [req-ca67b692-a6fe-404a-878a-bdec336cb88b req-ae5521df-4afe-4593-bf78-33025835fe54 service nova] Lock "ea1c2d74-70b4-4547-a887-78e291c3082a-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.506750] env[61978]: DEBUG nova.compute.manager [req-ca67b692-a6fe-404a-878a-bdec336cb88b req-ae5521df-4afe-4593-bf78-33025835fe54 service nova] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] No waiting events found dispatching network-vif-plugged-35d1b15c-e867-4d4b-8d09-58369a8e74e1 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 955.506906] env[61978]: WARNING nova.compute.manager [req-ca67b692-a6fe-404a-878a-bdec336cb88b req-ae5521df-4afe-4593-bf78-33025835fe54 service nova] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Received unexpected event network-vif-plugged-35d1b15c-e867-4d4b-8d09-58369a8e74e1 for instance with vm_state building and task_state spawning. [ 955.581313] env[61978]: DEBUG nova.compute.manager [req-ce4002f8-0e7f-4328-9d4b-930d195d77d8 req-887a8ec7-dbb5-4680-814c-e0f92fcc9542 service nova] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Received event network-changed-f10bef80-f5ec-40ab-bb8e-c1c9973e4e66 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 955.581313] env[61978]: DEBUG nova.compute.manager [req-ce4002f8-0e7f-4328-9d4b-930d195d77d8 req-887a8ec7-dbb5-4680-814c-e0f92fcc9542 service nova] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Refreshing instance network info cache due to event network-changed-f10bef80-f5ec-40ab-bb8e-c1c9973e4e66. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 955.581313] env[61978]: DEBUG oslo_concurrency.lockutils [req-ce4002f8-0e7f-4328-9d4b-930d195d77d8 req-887a8ec7-dbb5-4680-814c-e0f92fcc9542 service nova] Acquiring lock "refresh_cache-3a30ecc4-455f-49cf-98e8-d38be6a1c5a5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.581747] env[61978]: DEBUG oslo_concurrency.lockutils [req-ce4002f8-0e7f-4328-9d4b-930d195d77d8 req-887a8ec7-dbb5-4680-814c-e0f92fcc9542 service nova] Acquired lock "refresh_cache-3a30ecc4-455f-49cf-98e8-d38be6a1c5a5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.581747] env[61978]: DEBUG nova.network.neutron [req-ce4002f8-0e7f-4328-9d4b-930d195d77d8 req-887a8ec7-dbb5-4680-814c-e0f92fcc9542 service nova] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Refreshing network info cache for port f10bef80-f5ec-40ab-bb8e-c1c9973e4e66 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 955.633574] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394883, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.736472] env[61978]: DEBUG nova.compute.utils [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 955.739626] env[61978]: DEBUG nova.compute.manager [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 955.739800] env[61978]: DEBUG nova.network.neutron [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 955.764107] env[61978]: INFO nova.compute.manager [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Took 43.45 seconds to build instance. [ 955.779739] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394884, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085904} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.780064] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 955.780870] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d89d6e6d-adf0-4105-bfed-634700dd14c0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.815904] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] dd686727-fc33-4dc4-b386-aabec27cf215/dd686727-fc33-4dc4-b386-aabec27cf215.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 955.819187] env[61978]: DEBUG oslo_concurrency.lockutils [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "refresh_cache-ea1c2d74-70b4-4547-a887-78e291c3082a" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.819336] env[61978]: DEBUG oslo_concurrency.lockutils [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquired lock "refresh_cache-ea1c2d74-70b4-4547-a887-78e291c3082a" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.819479] env[61978]: DEBUG nova.network.neutron [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 955.820776] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-3adfd2da-6d7d-4847-80a3-a1a1c30df927 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.840969] env[61978]: DEBUG nova.policy [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e607dfc944154c1faed12da382640f0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6d7394d965f94155a34dd0ecc0957649', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 955.851962] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for the task: (returnval){ [ 955.851962] env[61978]: value = "task-1394885" [ 955.851962] env[61978]: _type = "Task" [ 955.851962] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.866516] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394885, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.887980] env[61978]: DEBUG nova.network.neutron [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 956.069021] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "8f609401-af09-4291-a1e7-a356fbc4aac9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.069156] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "8f609401-af09-4291-a1e7-a356fbc4aac9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.128620] env[61978]: DEBUG nova.network.neutron [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Updating instance_info_cache with network_info: [{"id": "35d1b15c-e867-4d4b-8d09-58369a8e74e1", "address": "fa:16:3e:32:e0:24", "network": {"id": "2f12a4fc-0daf-4101-919d-9b9372dcbb2f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1016739189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27ccd1f7b852490a8d92e2c0e714e7d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35d1b15c-e8", "ovs_interfaceid": "35d1b15c-e867-4d4b-8d09-58369a8e74e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.139256] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394883, 'name': CreateVM_Task, 'duration_secs': 0.813632} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.139522] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 956.140265] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.140445] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.140745] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 956.141015] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76e4576e-14f2-40d4-9b5e-212c17fde79b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.152570] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 956.152570] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52fa11b8-64a0-2fbf-ff60-d1aa2f41ef73" [ 956.152570] env[61978]: _type = "Task" [ 956.152570] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.162540] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52fa11b8-64a0-2fbf-ff60-d1aa2f41ef73, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.245874] env[61978]: DEBUG nova.compute.manager [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 956.279317] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86ea3f6d-2e81-4de2-a526-dee4b61aa27e tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.039s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.294154] env[61978]: DEBUG nova.network.neutron [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Successfully created port: cc183679-2e0d-4d97-9429-82606794bea3 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 956.349365] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba1159f-31a4-4688-9b9b-5467f510d420 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.361692] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394885, 'name': ReconfigVM_Task, 'duration_secs': 0.37858} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.363522] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Reconfigured VM instance instance-00000022 to attach disk [datastore1] dd686727-fc33-4dc4-b386-aabec27cf215/dd686727-fc33-4dc4-b386-aabec27cf215.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 956.364166] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2fca5ed5-9168-4022-b090-6bfed2fab06d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.366436] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4174364e-9255-481b-aebe-8ff8b5b77a90 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.404551] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71b39d7-3cf0-4a05-ba77-83070c079334 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.407164] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for the task: (returnval){ [ 956.407164] env[61978]: value = "task-1394886" [ 956.407164] env[61978]: _type = "Task" [ 956.407164] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.414391] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4775fea5-17ae-4727-9e0d-e44c7d60566c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.421775] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394886, 'name': Rename_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.431262] env[61978]: DEBUG nova.compute.provider_tree [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.477715] env[61978]: DEBUG nova.network.neutron [req-ce4002f8-0e7f-4328-9d4b-930d195d77d8 req-887a8ec7-dbb5-4680-814c-e0f92fcc9542 service nova] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Updated VIF entry in instance network info cache for port f10bef80-f5ec-40ab-bb8e-c1c9973e4e66. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 956.478088] env[61978]: DEBUG nova.network.neutron [req-ce4002f8-0e7f-4328-9d4b-930d195d77d8 req-887a8ec7-dbb5-4680-814c-e0f92fcc9542 service nova] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Updating instance_info_cache with network_info: [{"id": "f10bef80-f5ec-40ab-bb8e-c1c9973e4e66", "address": "fa:16:3e:59:23:5d", "network": {"id": "2f12a4fc-0daf-4101-919d-9b9372dcbb2f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1016739189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27ccd1f7b852490a8d92e2c0e714e7d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf10bef80-f5", "ovs_interfaceid": "f10bef80-f5ec-40ab-bb8e-c1c9973e4e66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.608186] env[61978]: DEBUG nova.compute.manager [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 956.609159] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ccc4b4b-48c3-4b19-bd76-3f4f51c8902d {{(pid=61978) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.633878] env[61978]: DEBUG oslo_concurrency.lockutils [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Releasing lock "refresh_cache-ea1c2d74-70b4-4547-a887-78e291c3082a" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.634060] env[61978]: DEBUG nova.compute.manager [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Instance network_info: |[{"id": "35d1b15c-e867-4d4b-8d09-58369a8e74e1", "address": "fa:16:3e:32:e0:24", "network": {"id": "2f12a4fc-0daf-4101-919d-9b9372dcbb2f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1016739189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27ccd1f7b852490a8d92e2c0e714e7d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35d1b15c-e8", "ovs_interfaceid": "35d1b15c-e867-4d4b-8d09-58369a8e74e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 956.634694] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:e0:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf5bfbae-a882-4d34-be33-b31e274b3077', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '35d1b15c-e867-4d4b-8d09-58369a8e74e1', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 956.649450] env[61978]: DEBUG oslo.service.loopingcall [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 956.650811] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 956.651130] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ef55c0dc-b953-43eb-b1c7-b424e13fb6e9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.684681] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52fa11b8-64a0-2fbf-ff60-d1aa2f41ef73, 'name': SearchDatastore_Task, 'duration_secs': 0.010096} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.686064] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.686317] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 956.686554] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.686700] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.686880] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 956.687142] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 956.687142] env[61978]: value = "task-1394887" [ 956.687142] env[61978]: _type = "Task" [ 956.687142] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.687321] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f0238c6-c786-4c8d-a9cc-34796d296ad3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.698748] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394887, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.704141] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 956.704348] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 956.705211] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70e9c174-dc27-4bed-87cb-ea87854d31dd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.712889] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 956.712889] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c15991-563b-a0bd-faf9-9bee885b4739" [ 956.712889] env[61978]: _type = "Task" [ 956.712889] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.722394] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c15991-563b-a0bd-faf9-9bee885b4739, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.783069] env[61978]: DEBUG nova.compute.manager [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 956.919431] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394886, 'name': Rename_Task, 'duration_secs': 0.280762} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.919741] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 956.920020] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3883d718-6a8a-4cad-b853-61a7480cda8c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.927986] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for the task: (returnval){ [ 956.927986] env[61978]: value = "task-1394888" [ 956.927986] env[61978]: _type = "Task" [ 956.927986] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.935789] env[61978]: DEBUG nova.scheduler.client.report [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 956.945329] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394888, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.981745] env[61978]: DEBUG oslo_concurrency.lockutils [req-ce4002f8-0e7f-4328-9d4b-930d195d77d8 req-887a8ec7-dbb5-4680-814c-e0f92fcc9542 service nova] Releasing lock "refresh_cache-3a30ecc4-455f-49cf-98e8-d38be6a1c5a5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.122686] env[61978]: INFO nova.compute.manager [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] instance snapshotting [ 957.122953] env[61978]: WARNING nova.compute.manager [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 957.127253] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f664914-d6b2-4510-9d09-863873e2c15e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.147081] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6731ab-9d76-4630-ba87-5dba1e648763 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.200664] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394887, 'name': CreateVM_Task, 'duration_secs': 0.42687} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.200664] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 957.200664] env[61978]: DEBUG oslo_concurrency.lockutils [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.200664] env[61978]: DEBUG oslo_concurrency.lockutils [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.200859] env[61978]: DEBUG oslo_concurrency.lockutils [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 957.201344] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a544811-dcd0-43f5-ad04-7f884b9a6934 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.207796] env[61978]: DEBUG 
oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 957.207796] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52628b94-9d20-8550-ede4-3d514705f199" [ 957.207796] env[61978]: _type = "Task" [ 957.207796] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.218560] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52628b94-9d20-8550-ede4-3d514705f199, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.224476] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c15991-563b-a0bd-faf9-9bee885b4739, 'name': SearchDatastore_Task, 'duration_secs': 0.028087} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.225301] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-775c23db-1d52-499e-ac2c-b84d2fc675f6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.231368] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 957.231368] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52569fd8-58e8-5cd8-90d9-a5e1a28f4363" [ 957.231368] env[61978]: _type = "Task" [ 957.231368] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.239363] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52569fd8-58e8-5cd8-90d9-a5e1a28f4363, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.254747] env[61978]: DEBUG nova.compute.manager [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 957.276634] env[61978]: DEBUG nova.virt.hardware [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 957.276859] env[61978]: DEBUG nova.virt.hardware [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 957.277026] env[61978]: DEBUG nova.virt.hardware [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 957.277206] env[61978]: DEBUG nova.virt.hardware [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 957.277349] env[61978]: DEBUG nova.virt.hardware [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 957.277501] env[61978]: DEBUG nova.virt.hardware [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 957.277735] env[61978]: DEBUG nova.virt.hardware [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 957.277903] env[61978]: DEBUG nova.virt.hardware [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 957.278083] env[61978]: DEBUG nova.virt.hardware [None 
req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 957.278254] env[61978]: DEBUG nova.virt.hardware [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 957.278430] env[61978]: DEBUG nova.virt.hardware [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 957.279318] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354c7e43-c7b4-45d5-b360-5637fc3c497e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.287942] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-573f7296-9599-4716-9dae-28a291c2debe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.311301] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.438622] env[61978]: DEBUG oslo_vmware.api [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394888, 'name': PowerOnVM_Task, 'duration_secs': 0.493981} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.438897] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 957.439119] env[61978]: INFO nova.compute.manager [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Took 8.32 seconds to spawn the instance on the hypervisor. 
[ 957.439306] env[61978]: DEBUG nova.compute.manager [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 957.440159] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac6c352e-8d36-4bde-bc2e-5f7a53f0fe87 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.443607] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.209s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.445708] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.070s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.447250] env[61978]: INFO nova.compute.claims [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 957.470644] env[61978]: INFO nova.scheduler.client.report [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Deleted allocations for instance 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b [ 957.537830] env[61978]: DEBUG nova.compute.manager [req-8e463bdf-3f3a-4032-8331-7eef9f18a56c req-e3644cdc-d0e4-4674-81b6-72bb9b99cfa8 service nova] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Received event network-changed-35d1b15c-e867-4d4b-8d09-58369a8e74e1 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 957.538047] env[61978]: DEBUG nova.compute.manager [req-8e463bdf-3f3a-4032-8331-7eef9f18a56c req-e3644cdc-d0e4-4674-81b6-72bb9b99cfa8 service nova] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Refreshing instance network info cache due to event network-changed-35d1b15c-e867-4d4b-8d09-58369a8e74e1. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 957.538629] env[61978]: DEBUG oslo_concurrency.lockutils [req-8e463bdf-3f3a-4032-8331-7eef9f18a56c req-e3644cdc-d0e4-4674-81b6-72bb9b99cfa8 service nova] Acquiring lock "refresh_cache-ea1c2d74-70b4-4547-a887-78e291c3082a" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.538629] env[61978]: DEBUG oslo_concurrency.lockutils [req-8e463bdf-3f3a-4032-8331-7eef9f18a56c req-e3644cdc-d0e4-4674-81b6-72bb9b99cfa8 service nova] Acquired lock "refresh_cache-ea1c2d74-70b4-4547-a887-78e291c3082a" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.538629] env[61978]: DEBUG nova.network.neutron [req-8e463bdf-3f3a-4032-8331-7eef9f18a56c req-e3644cdc-d0e4-4674-81b6-72bb9b99cfa8 service nova] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Refreshing network info cache for port 35d1b15c-e867-4d4b-8d09-58369a8e74e1 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 957.659690] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Creating Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 957.660037] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-cf61a2c2-7021-44b2-913d-60d0134126d5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.670631] env[61978]: DEBUG oslo_vmware.api [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 957.670631] env[61978]: value = "task-1394889" [ 957.670631] env[61978]: _type = "Task" [ 957.670631] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.683279] env[61978]: DEBUG oslo_vmware.api [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394889, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.721057] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52628b94-9d20-8550-ede4-3d514705f199, 'name': SearchDatastore_Task, 'duration_secs': 0.010912} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.721481] env[61978]: DEBUG oslo_concurrency.lockutils [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.721761] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 957.722125] env[61978]: DEBUG oslo_concurrency.lockutils [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.743026] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52569fd8-58e8-5cd8-90d9-a5e1a28f4363, 'name': SearchDatastore_Task, 'duration_secs': 0.010038} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.744069] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.744069] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5/3a30ecc4-455f-49cf-98e8-d38be6a1c5a5.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 957.744069] env[61978]: DEBUG oslo_concurrency.lockutils [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.744291] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 957.744359] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d65d6a58-3f18-4e1a-9f58-31eff3fabcc0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.746949] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0768b5b0-8aa5-4425-b49c-1d896d5cc249 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.755944] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 957.755944] env[61978]: value = "task-1394890" [ 957.755944] env[61978]: _type = "Task" [ 957.755944] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.760522] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 957.760728] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 957.764303] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34c85759-f3fc-4427-a2d3-b034cf7b2c20 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.771123] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394890, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.776350] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 957.776350] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]524d912c-420e-71e2-ab8e-7d8437e4f184" [ 957.776350] env[61978]: _type = "Task" [ 957.776350] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.790369] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]524d912c-420e-71e2-ab8e-7d8437e4f184, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.791509] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3834189f-2938-46b0-970b-95ff3fdd8e3d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.802388] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 957.802388] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]525c2a02-d2ef-074e-910d-6155e14e7b16" [ 957.802388] env[61978]: _type = "Task" [ 957.802388] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.814371] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]525c2a02-d2ef-074e-910d-6155e14e7b16, 'name': SearchDatastore_Task, 'duration_secs': 0.010314} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.814735] env[61978]: DEBUG oslo_concurrency.lockutils [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.815056] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] ea1c2d74-70b4-4547-a887-78e291c3082a/ea1c2d74-70b4-4547-a887-78e291c3082a.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 957.815404] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db7f0a83-7e16-476d-99fe-e8401d7e535c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.824918] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 957.824918] env[61978]: value = "task-1394891" [ 957.824918] env[61978]: _type = "Task" [ 957.824918] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.837315] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394891, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.968154] env[61978]: INFO nova.compute.manager [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Took 39.84 seconds to build instance. [ 957.980802] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2681cd75-c3df-4251-b95f-41b8a300a2ee tempest-FloatingIPsAssociationTestJSON-1889719302 tempest-FloatingIPsAssociationTestJSON-1889719302-project-member] Lock "9b6b4da7-4f86-46bc-a75f-fc5e1126c53b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.733s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.159856] env[61978]: DEBUG nova.network.neutron [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Successfully updated port: cc183679-2e0d-4d97-9429-82606794bea3 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 958.187065] env[61978]: DEBUG oslo_vmware.api [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394889, 'name': CreateSnapshot_Task, 'duration_secs': 0.494102} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.187065] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Created Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 958.187148] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d03e7a10-172f-4ef4-af9c-e4f255c406bc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.268111] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394890, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.340481] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394891, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.381421] env[61978]: DEBUG nova.network.neutron [req-8e463bdf-3f3a-4032-8331-7eef9f18a56c req-e3644cdc-d0e4-4674-81b6-72bb9b99cfa8 service nova] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Updated VIF entry in instance network info cache for port 35d1b15c-e867-4d4b-8d09-58369a8e74e1. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 958.381829] env[61978]: DEBUG nova.network.neutron [req-8e463bdf-3f3a-4032-8331-7eef9f18a56c req-e3644cdc-d0e4-4674-81b6-72bb9b99cfa8 service nova] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Updating instance_info_cache with network_info: [{"id": "35d1b15c-e867-4d4b-8d09-58369a8e74e1", "address": "fa:16:3e:32:e0:24", "network": {"id": "2f12a4fc-0daf-4101-919d-9b9372dcbb2f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1016739189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27ccd1f7b852490a8d92e2c0e714e7d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35d1b15c-e8", "ovs_interfaceid": "35d1b15c-e867-4d4b-8d09-58369a8e74e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.467199] env[61978]: DEBUG oslo_concurrency.lockutils [None req-324e40f8-6d5e-4349-9e16-25c194d0ef91 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Lock "dd686727-fc33-4dc4-b386-aabec27cf215" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.802s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.664878] env[61978]: DEBUG oslo_concurrency.lockutils [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "refresh_cache-8a21e6a7-c34e-4af0-b1fd-8a501694614c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.665145] env[61978]: DEBUG oslo_concurrency.lockutils [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired lock "refresh_cache-8a21e6a7-c34e-4af0-b1fd-8a501694614c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.665190] env[61978]: DEBUG nova.network.neutron [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 958.710068] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Creating linked-clone VM from snapshot {{(pid=61978) _create_linked_clone_from_snapshot 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 958.713719] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-89dbce5e-4e45-469f-8acd-0a126a1c956d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.727668] env[61978]: DEBUG oslo_vmware.api [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 958.727668] env[61978]: value = "task-1394892" [ 958.727668] env[61978]: _type = "Task" [ 958.727668] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.746962] env[61978]: DEBUG oslo_vmware.api [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394892, 'name': CloneVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.767523] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394890, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520627} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.770358] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5/3a30ecc4-455f-49cf-98e8-d38be6a1c5a5.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 958.770663] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 958.771873] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f368d3a0-d64a-43c8-8697-2c1a5d09ab2c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.779491] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 958.779491] env[61978]: value = "task-1394893" [ 958.779491] env[61978]: _type = "Task" [ 958.779491] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.792936] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394893, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.839174] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394891, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.791076} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.843573] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] ea1c2d74-70b4-4547-a887-78e291c3082a/ea1c2d74-70b4-4547-a887-78e291c3082a.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 958.843687] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 958.844737] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d14bede5-2876-4601-8c2e-86d02ee430e6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.853570] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 958.853570] env[61978]: value = "task-1394894" [ 958.853570] env[61978]: _type = "Task" [ 958.853570] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.872880] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394894, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.889463] env[61978]: DEBUG oslo_concurrency.lockutils [req-8e463bdf-3f3a-4032-8331-7eef9f18a56c req-e3644cdc-d0e4-4674-81b6-72bb9b99cfa8 service nova] Releasing lock "refresh_cache-ea1c2d74-70b4-4547-a887-78e291c3082a" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.971580] env[61978]: DEBUG nova.compute.manager [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 959.030892] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e4ed51-1786-4783-997b-ccfbb131f93d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.039953] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3bdb356-4c15-4355-8e1d-4834ae51d154 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.077713] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01f3ecb-d3da-4a3a-ad98-419bc32966ee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.086395] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d664c74d-ae7d-4e46-901d-10b7260b3710 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.101743] env[61978]: DEBUG nova.compute.provider_tree [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.200086] env[61978]: DEBUG nova.network.neutron [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 959.241810] env[61978]: DEBUG oslo_vmware.api [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394892, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.290353] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394893, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077914} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.293887] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 959.294816] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2600594-7201-45e4-b17c-e27a3e6ee252 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.325094] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5/3a30ecc4-455f-49cf-98e8-d38be6a1c5a5.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 959.325828] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29e16671-a7b2-45a3-b3da-e94630d3cd39 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.352298] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 959.352298] env[61978]: value = "task-1394895" [ 959.352298] env[61978]: _type = "Task" [ 959.352298] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.365467] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394894, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079179} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.368872] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 959.369254] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394895, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.370687] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d2a21cc-f4d9-442a-b1c6-16500f09f230 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.396436] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] ea1c2d74-70b4-4547-a887-78e291c3082a/ea1c2d74-70b4-4547-a887-78e291c3082a.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 959.397769] env[61978]: DEBUG nova.network.neutron [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Updating instance_info_cache with network_info: [{"id": "cc183679-2e0d-4d97-9429-82606794bea3", "address": "fa:16:3e:6d:c1:68", "network": {"id": "bc368623-cfb2-43fb-a7d6-8dd238bd961d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1360202280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d7394d965f94155a34dd0ecc0957649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc183679-2e", "ovs_interfaceid": "cc183679-2e0d-4d97-9429-82606794bea3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.399268] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c421c88-d334-4bd3-b5fd-4c5412ef2ecb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.415790] env[61978]: DEBUG oslo_concurrency.lockutils [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Releasing lock "refresh_cache-8a21e6a7-c34e-4af0-b1fd-8a501694614c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.415895] env[61978]: DEBUG nova.compute.manager [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Instance network_info: |[{"id": "cc183679-2e0d-4d97-9429-82606794bea3", "address": "fa:16:3e:6d:c1:68", "network": {"id": "bc368623-cfb2-43fb-a7d6-8dd238bd961d", "bridge": "br-int", "label": 
"tempest-ServersAdminTestJSON-1360202280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d7394d965f94155a34dd0ecc0957649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc183679-2e", "ovs_interfaceid": "cc183679-2e0d-4d97-9429-82606794bea3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 959.416511] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:c1:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e4c8c8fd-baca-4e60-97dc-ff0418d63215', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc183679-2e0d-4d97-9429-82606794bea3', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 959.424367] env[61978]: DEBUG oslo.service.loopingcall [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 959.425349] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 959.426014] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0ddf208-bcfb-42bc-ae39-74bfe9f2e694 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.443353] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 959.443353] env[61978]: value = "task-1394896" [ 959.443353] env[61978]: _type = "Task" [ 959.443353] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.453299] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394896, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.454848] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 959.454848] env[61978]: value = "task-1394897" [ 959.454848] env[61978]: _type = "Task" [ 959.454848] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.465949] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394897, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.494306] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.567671] env[61978]: DEBUG nova.compute.manager [req-e8e2c88c-c487-4af2-bb0e-ab38f9a9c21c req-9c80f44b-a3b3-44b1-9f01-d0458985fa9b service nova] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Received event network-vif-plugged-cc183679-2e0d-4d97-9429-82606794bea3 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 959.567915] env[61978]: DEBUG oslo_concurrency.lockutils [req-e8e2c88c-c487-4af2-bb0e-ab38f9a9c21c req-9c80f44b-a3b3-44b1-9f01-d0458985fa9b service nova] Acquiring lock "8a21e6a7-c34e-4af0-b1fd-8a501694614c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.568206] env[61978]: DEBUG oslo_concurrency.lockutils [req-e8e2c88c-c487-4af2-bb0e-ab38f9a9c21c req-9c80f44b-a3b3-44b1-9f01-d0458985fa9b service nova] Lock "8a21e6a7-c34e-4af0-b1fd-8a501694614c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.568397] env[61978]: DEBUG oslo_concurrency.lockutils [req-e8e2c88c-c487-4af2-bb0e-ab38f9a9c21c req-9c80f44b-a3b3-44b1-9f01-d0458985fa9b service nova] Lock "8a21e6a7-c34e-4af0-b1fd-8a501694614c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.568568] env[61978]: DEBUG nova.compute.manager [req-e8e2c88c-c487-4af2-bb0e-ab38f9a9c21c req-9c80f44b-a3b3-44b1-9f01-d0458985fa9b service nova] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] No waiting events found dispatching network-vif-plugged-cc183679-2e0d-4d97-9429-82606794bea3 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 959.568734] env[61978]: WARNING nova.compute.manager [req-e8e2c88c-c487-4af2-bb0e-ab38f9a9c21c req-9c80f44b-a3b3-44b1-9f01-d0458985fa9b service nova] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Received unexpected event network-vif-plugged-cc183679-2e0d-4d97-9429-82606794bea3 for instance with vm_state building and task_state spawning. 
[ 959.568892] env[61978]: DEBUG nova.compute.manager [req-e8e2c88c-c487-4af2-bb0e-ab38f9a9c21c req-9c80f44b-a3b3-44b1-9f01-d0458985fa9b service nova] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Received event network-changed-cc183679-2e0d-4d97-9429-82606794bea3 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 959.569057] env[61978]: DEBUG nova.compute.manager [req-e8e2c88c-c487-4af2-bb0e-ab38f9a9c21c req-9c80f44b-a3b3-44b1-9f01-d0458985fa9b service nova] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Refreshing instance network info cache due to event network-changed-cc183679-2e0d-4d97-9429-82606794bea3. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 959.569250] env[61978]: DEBUG oslo_concurrency.lockutils [req-e8e2c88c-c487-4af2-bb0e-ab38f9a9c21c req-9c80f44b-a3b3-44b1-9f01-d0458985fa9b service nova] Acquiring lock "refresh_cache-8a21e6a7-c34e-4af0-b1fd-8a501694614c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.569383] env[61978]: DEBUG oslo_concurrency.lockutils [req-e8e2c88c-c487-4af2-bb0e-ab38f9a9c21c req-9c80f44b-a3b3-44b1-9f01-d0458985fa9b service nova] Acquired lock "refresh_cache-8a21e6a7-c34e-4af0-b1fd-8a501694614c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.569553] env[61978]: DEBUG nova.network.neutron [req-e8e2c88c-c487-4af2-bb0e-ab38f9a9c21c req-9c80f44b-a3b3-44b1-9f01-d0458985fa9b service nova] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Refreshing network info cache for port cc183679-2e0d-4d97-9429-82606794bea3 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 959.606485] env[61978]: DEBUG nova.scheduler.client.report [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 959.749650] env[61978]: DEBUG oslo_vmware.api [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394892, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.827522] env[61978]: DEBUG nova.compute.manager [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 959.828442] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a7bd8b8-f2cf-4965-a450-7e9be3659d3f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.867978] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394895, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.958629] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394896, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.979273] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394897, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.111949] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.666s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.112711] env[61978]: DEBUG nova.compute.manager [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 960.118021] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.572s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.118021] env[61978]: INFO nova.compute.claims [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 960.245329] env[61978]: DEBUG oslo_vmware.api [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394892, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.342818] env[61978]: INFO nova.compute.manager [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] instance snapshotting [ 960.345843] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc98d46-85ed-473d-b503-e96a1ff0e883 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.378165] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83630e4a-c649-4ab5-8094-db164a40dba9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.386776] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394895, 'name': ReconfigVM_Task, 'duration_secs': 0.836949} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.388650] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Reconfigured VM instance instance-00000023 to attach disk [datastore1] 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5/3a30ecc4-455f-49cf-98e8-d38be6a1c5a5.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 960.391354] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6b3b90fe-5d24-4018-b64a-f70a4b14479c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.399890] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 960.399890] env[61978]: value = "task-1394898" [ 960.399890] env[61978]: _type = "Task" [ 960.399890] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.409817] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394898, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.445592] env[61978]: DEBUG nova.network.neutron [req-e8e2c88c-c487-4af2-bb0e-ab38f9a9c21c req-9c80f44b-a3b3-44b1-9f01-d0458985fa9b service nova] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Updated VIF entry in instance network info cache for port cc183679-2e0d-4d97-9429-82606794bea3. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 960.446218] env[61978]: DEBUG nova.network.neutron [req-e8e2c88c-c487-4af2-bb0e-ab38f9a9c21c req-9c80f44b-a3b3-44b1-9f01-d0458985fa9b service nova] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Updating instance_info_cache with network_info: [{"id": "cc183679-2e0d-4d97-9429-82606794bea3", "address": "fa:16:3e:6d:c1:68", "network": {"id": "bc368623-cfb2-43fb-a7d6-8dd238bd961d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1360202280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d7394d965f94155a34dd0ecc0957649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc183679-2e", "ovs_interfaceid": "cc183679-2e0d-4d97-9429-82606794bea3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.457442] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394896, 'name': ReconfigVM_Task, 'duration_secs': 0.652917} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.457803] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Reconfigured VM instance instance-00000024 to attach disk [datastore1] ea1c2d74-70b4-4547-a887-78e291c3082a/ea1c2d74-70b4-4547-a887-78e291c3082a.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 960.458796] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-07ead73c-a212-412e-9458-754aecea6d68 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.471221] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394897, 'name': CreateVM_Task, 'duration_secs': 0.740944} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.472516] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 960.472838] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 960.472838] env[61978]: value = "task-1394899" [ 960.472838] env[61978]: _type = "Task" [ 960.472838] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.473493] env[61978]: DEBUG oslo_concurrency.lockutils [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.473659] env[61978]: DEBUG oslo_concurrency.lockutils [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.473972] env[61978]: DEBUG oslo_concurrency.lockutils [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 960.474278] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d4d7bd0-46fd-45ff-9b26-57d885599e79 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.488036] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 960.488036] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5238cd50-0e5b-e51d-6023-6d1dc12f7106" [ 960.488036] env[61978]: _type = "Task" [ 960.488036] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.488343] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394899, 'name': Rename_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.499788] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5238cd50-0e5b-e51d-6023-6d1dc12f7106, 'name': SearchDatastore_Task, 'duration_secs': 0.011755} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.500112] env[61978]: DEBUG oslo_concurrency.lockutils [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.500578] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 960.500578] env[61978]: DEBUG oslo_concurrency.lockutils [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.500720] env[61978]: DEBUG oslo_concurrency.lockutils [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.500894] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 960.501187] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a2a748e6-ae36-400e-81ee-c5aec1bf1cdc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.512604] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 960.512808] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 960.513622] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2559293-86e5-46b6-93af-970c1722571e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.520115] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 960.520115] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52501f58-9fab-69ac-9304-8a12f4b1160d" [ 960.520115] env[61978]: _type = "Task" [ 960.520115] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.530183] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52501f58-9fab-69ac-9304-8a12f4b1160d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.626248] env[61978]: DEBUG nova.compute.utils [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 960.627713] env[61978]: DEBUG nova.compute.manager [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 960.627926] env[61978]: DEBUG nova.network.neutron [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 960.689760] env[61978]: DEBUG nova.policy [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '346f982562de48fab1702aca567113ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3a4f29a959447159b2f7d194ea94873', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 960.743955] env[61978]: DEBUG oslo_vmware.api [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394892, 'name': CloneVM_Task} progress is 100%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.895117] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Creating Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 960.895666] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3b0bcda8-7bca-43ca-8fb6-9285503a0ea2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.906123] env[61978]: DEBUG oslo_vmware.api [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for the task: (returnval){ [ 960.906123] env[61978]: value = "task-1394900" [ 960.906123] env[61978]: _type = "Task" [ 960.906123] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.915904] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394898, 'name': Rename_Task, 'duration_secs': 0.249168} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.916648] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 960.916961] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0aff51ef-c898-4692-82a1-c1f522e8f2dc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.925440] env[61978]: DEBUG oslo_vmware.api [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394900, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.933114] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 960.933114] env[61978]: value = "task-1394901" [ 960.933114] env[61978]: _type = "Task" [ 960.933114] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.942525] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394901, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.954410] env[61978]: DEBUG oslo_concurrency.lockutils [req-e8e2c88c-c487-4af2-bb0e-ab38f9a9c21c req-9c80f44b-a3b3-44b1-9f01-d0458985fa9b service nova] Releasing lock "refresh_cache-8a21e6a7-c34e-4af0-b1fd-8a501694614c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.986652] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394899, 'name': Rename_Task, 'duration_secs': 0.253318} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.986652] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 960.986652] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-567d0b09-84fd-4586-a289-88f6f1b9e45d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.990335] env[61978]: DEBUG nova.network.neutron [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Successfully created port: b04a501f-29a8-442a-9f2c-dddd76f5e335 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 960.996029] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 960.996029] env[61978]: value = "task-1394902" [ 960.996029] env[61978]: _type = "Task" [ 960.996029] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.008741] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394902, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.035270] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52501f58-9fab-69ac-9304-8a12f4b1160d, 'name': SearchDatastore_Task, 'duration_secs': 0.027795} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.035270] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e43e3da-daa0-40b9-83a6-add42a8abbf3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.040332] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 961.040332] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d07b01-fc74-bdc2-cba7-e635123ad60e" [ 961.040332] env[61978]: _type = "Task" [ 961.040332] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.050939] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d07b01-fc74-bdc2-cba7-e635123ad60e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.134987] env[61978]: DEBUG nova.compute.manager [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 961.247554] env[61978]: DEBUG oslo_vmware.api [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394892, 'name': CloneVM_Task, 'duration_secs': 2.059375} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.251766] env[61978]: INFO nova.virt.vmwareapi.vmops [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Created linked-clone VM from snapshot [ 961.253475] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2f841a-facd-4935-b99e-3efb9bd0c2e3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.262894] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Uploading image 7e38f3f1-4b65-4030-8b1a-af8f828b3d96 {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 961.291258] env[61978]: DEBUG oslo_vmware.rw_handles [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 961.291258] env[61978]: value = "vm-295865" [ 961.291258] env[61978]: _type = "VirtualMachine" [ 961.291258] env[61978]: }. 
{{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 961.291570] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-203a4a76-339d-447d-8045-ac444735b759 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.301017] env[61978]: DEBUG oslo_vmware.rw_handles [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lease: (returnval){ [ 961.301017] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]525454e6-4685-ba0b-9c4e-dd3969a918f9" [ 961.301017] env[61978]: _type = "HttpNfcLease" [ 961.301017] env[61978]: } obtained for exporting VM: (result){ [ 961.301017] env[61978]: value = "vm-295865" [ 961.301017] env[61978]: _type = "VirtualMachine" [ 961.301017] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 961.301318] env[61978]: DEBUG oslo_vmware.api [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the lease: (returnval){ [ 961.301318] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]525454e6-4685-ba0b-9c4e-dd3969a918f9" [ 961.301318] env[61978]: _type = "HttpNfcLease" [ 961.301318] env[61978]: } to be ready. {{(pid=61978) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 961.309759] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 961.309759] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]525454e6-4685-ba0b-9c4e-dd3969a918f9" [ 961.309759] env[61978]: _type = "HttpNfcLease" [ 961.309759] env[61978]: } is initializing. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 961.421433] env[61978]: DEBUG oslo_vmware.api [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394900, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.447229] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394901, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.507646] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394902, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.555592] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d07b01-fc74-bdc2-cba7-e635123ad60e, 'name': SearchDatastore_Task, 'duration_secs': 0.012098} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.559152] env[61978]: DEBUG oslo_concurrency.lockutils [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.559480] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 8a21e6a7-c34e-4af0-b1fd-8a501694614c/8a21e6a7-c34e-4af0-b1fd-8a501694614c.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 961.559994] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba0d3892-6732-45e3-9c98-869620fba6db {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.576366] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 961.576366] env[61978]: value = "task-1394904" [ 961.576366] env[61978]: _type = "Task" [ 961.576366] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.589610] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394904, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.769974] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7865e00-06fe-4b6b-acac-fc6c3770efc2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.777553] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe90d09-754f-40a7-89cd-377dcb256c02 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.816558] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3557bb1-a7ca-4e16-b37f-5aa648f85625 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.826930] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 961.826930] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]525454e6-4685-ba0b-9c4e-dd3969a918f9" [ 961.826930] env[61978]: _type = "HttpNfcLease" [ 961.826930] env[61978]: } is ready. 
{{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 961.827523] env[61978]: DEBUG oslo_vmware.rw_handles [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 961.827523] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]525454e6-4685-ba0b-9c4e-dd3969a918f9" [ 961.827523] env[61978]: _type = "HttpNfcLease" [ 961.827523] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 961.828833] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117501d6-aa82-4a91-9b55-d66f897ae4d2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.833554] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e8e4c3-7043-4d78-874c-0e8221e20d75 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.849472] env[61978]: DEBUG nova.compute.provider_tree [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 961.855152] env[61978]: DEBUG oslo_vmware.rw_handles [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527965c6-13c0-3726-4042-2cc153bda0b9/disk-0.vmdk from lease info. {{(pid=61978) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 961.855356] env[61978]: DEBUG oslo_vmware.rw_handles [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527965c6-13c0-3726-4042-2cc153bda0b9/disk-0.vmdk for reading. {{(pid=61978) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 961.932469] env[61978]: DEBUG oslo_vmware.api [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394900, 'name': CreateSnapshot_Task, 'duration_secs': 0.777455} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.933149] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Created Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 961.933949] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04624a6e-e55a-4905-adaa-24cce24adbe7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.949691] env[61978]: DEBUG oslo_vmware.api [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394901, 'name': PowerOnVM_Task, 'duration_secs': 0.659377} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.955939] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 961.955939] env[61978]: INFO nova.compute.manager [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Took 10.19 seconds to spawn the instance on the hypervisor. [ 961.955939] env[61978]: DEBUG nova.compute.manager [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 961.955939] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25ff74e-187f-4a13-b288-e26064049a68 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.004956] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f5b1ad38-38a0-4129-9f55-8fb0b15b6a82 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.010936] env[61978]: DEBUG oslo_vmware.api [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394902, 'name': PowerOnVM_Task, 'duration_secs': 0.611283} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.012189] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 962.012454] env[61978]: INFO nova.compute.manager [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Took 7.99 seconds to spawn the instance on the hypervisor. [ 962.012707] env[61978]: DEBUG nova.compute.manager [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 962.014197] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2cf07cd-f19f-4ac3-96e4-4235098dd3aa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.086593] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394904, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511787} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.086882] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 8a21e6a7-c34e-4af0-b1fd-8a501694614c/8a21e6a7-c34e-4af0-b1fd-8a501694614c.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 962.087106] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 962.087367] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e77821c9-c84b-480c-9628-dab1a4a80870 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.096251] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 962.096251] env[61978]: value = "task-1394905" [ 962.096251] env[61978]: _type = "Task" [ 962.096251] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.103689] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394905, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.148243] env[61978]: DEBUG nova.compute.manager [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 962.172399] env[61978]: DEBUG nova.virt.hardware [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 962.172631] env[61978]: DEBUG nova.virt.hardware [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 962.172778] env[61978]: DEBUG nova.virt.hardware [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 962.172948] env[61978]: DEBUG nova.virt.hardware [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 962.173098] env[61978]: DEBUG nova.virt.hardware [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 962.173237] env[61978]: DEBUG nova.virt.hardware [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 962.173469] 
env[61978]: DEBUG nova.virt.hardware [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 962.173622] env[61978]: DEBUG nova.virt.hardware [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 962.173780] env[61978]: DEBUG nova.virt.hardware [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 962.173935] env[61978]: DEBUG nova.virt.hardware [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 962.174109] env[61978]: DEBUG nova.virt.hardware [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 962.175012] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93455e67-a971-4a25-aec4-9ce2c58269c7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.183948] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600f022a-427b-4e8f-a144-9b22e2b12ea2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.352436] env[61978]: DEBUG nova.scheduler.client.report [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 962.461559] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Creating linked-clone VM from snapshot {{(pid=61978) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 962.461879] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task 
with opID=oslo.vmware-310fc8ac-31db-4486-a7c1-2dfd574c919a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.471831] env[61978]: DEBUG oslo_vmware.api [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for the task: (returnval){ [ 962.471831] env[61978]: value = "task-1394906" [ 962.471831] env[61978]: _type = "Task" [ 962.471831] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.482988] env[61978]: INFO nova.compute.manager [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Took 42.52 seconds to build instance. [ 962.539895] env[61978]: INFO nova.compute.manager [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Took 38.62 seconds to build instance. [ 962.601426] env[61978]: DEBUG nova.compute.manager [req-fbac9442-6e00-4a9d-8946-0b6efcbc9837 req-69928b57-1e2d-4884-9e29-b177dfd6d49a service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Received event network-vif-plugged-b04a501f-29a8-442a-9f2c-dddd76f5e335 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 962.601663] env[61978]: DEBUG oslo_concurrency.lockutils [req-fbac9442-6e00-4a9d-8946-0b6efcbc9837 req-69928b57-1e2d-4884-9e29-b177dfd6d49a service nova] Acquiring lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.601883] env[61978]: DEBUG oslo_concurrency.lockutils [req-fbac9442-6e00-4a9d-8946-0b6efcbc9837 req-69928b57-1e2d-4884-9e29-b177dfd6d49a service nova] Lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.602181] env[61978]: DEBUG oslo_concurrency.lockutils [req-fbac9442-6e00-4a9d-8946-0b6efcbc9837 req-69928b57-1e2d-4884-9e29-b177dfd6d49a service nova] Lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.602297] env[61978]: DEBUG nova.compute.manager [req-fbac9442-6e00-4a9d-8946-0b6efcbc9837 req-69928b57-1e2d-4884-9e29-b177dfd6d49a service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] No waiting events found dispatching network-vif-plugged-b04a501f-29a8-442a-9f2c-dddd76f5e335 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 962.602483] env[61978]: WARNING nova.compute.manager [req-fbac9442-6e00-4a9d-8946-0b6efcbc9837 req-69928b57-1e2d-4884-9e29-b177dfd6d49a service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Received unexpected event network-vif-plugged-b04a501f-29a8-442a-9f2c-dddd76f5e335 for instance with vm_state building and task_state spawning. 
[ 962.610143] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394905, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075915} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.610539] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 962.611726] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf22b25a-36b7-4ef5-9bdb-a4ed13bebe06 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.650497] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Reconfiguring VM instance instance-00000025 to attach disk [datastore2] 8a21e6a7-c34e-4af0-b1fd-8a501694614c/8a21e6a7-c34e-4af0-b1fd-8a501694614c.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 962.651104] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d61dc26a-9222-45ea-8300-f9e389ce3055 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.678651] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 962.678651] env[61978]: value = "task-1394907" [ 962.678651] env[61978]: _type = "Task" [ 962.678651] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.691683] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394907, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.800449] env[61978]: DEBUG nova.network.neutron [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Successfully updated port: b04a501f-29a8-442a-9f2c-dddd76f5e335 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 962.858161] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.742s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.858723] env[61978]: DEBUG nova.compute.manager [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 962.864190] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.746s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.864190] env[61978]: INFO nova.compute.claims [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 962.983775] env[61978]: DEBUG oslo_vmware.api [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394906, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.985428] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7516f7e1-e945-466d-9dcf-603f6ca40043 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "3a30ecc4-455f-49cf-98e8-d38be6a1c5a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.931s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.042343] env[61978]: DEBUG oslo_concurrency.lockutils [None req-067278f8-03b2-4ad8-a653-f1e43abee11b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "ea1c2d74-70b4-4547-a887-78e291c3082a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.740s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.197744] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394907, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.309628] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "refresh_cache-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.309628] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "refresh_cache-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.309628] env[61978]: DEBUG nova.network.neutron [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 963.377463] env[61978]: DEBUG nova.compute.utils [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 963.379524] env[61978]: DEBUG nova.compute.manager [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 963.379524] env[61978]: DEBUG nova.network.neutron [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 963.449906] env[61978]: DEBUG nova.policy [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '031a98d4e0f345c28fd226142db35516', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86f4ae0b29af4ee2b33e5a499cf1e899', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 963.485867] env[61978]: DEBUG oslo_vmware.api [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394906, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.487723] env[61978]: DEBUG nova.compute.manager [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 963.546397] env[61978]: DEBUG nova.compute.manager [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 963.693434] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394907, 'name': ReconfigVM_Task, 'duration_secs': 0.543664} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.694257] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Reconfigured VM instance instance-00000025 to attach disk [datastore2] 8a21e6a7-c34e-4af0-b1fd-8a501694614c/8a21e6a7-c34e-4af0-b1fd-8a501694614c.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 963.694447] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-47b5e8cb-92e1-45e5-a676-5b5ed0f3a42e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.702275] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 963.702275] env[61978]: value = "task-1394908" [ 963.702275] env[61978]: _type = "Task" [ 963.702275] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.712836] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394908, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.874355] env[61978]: DEBUG nova.network.neutron [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 963.884862] env[61978]: DEBUG nova.compute.manager [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 963.992641] env[61978]: DEBUG oslo_vmware.api [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394906, 'name': CloneVM_Task, 'duration_secs': 1.46271} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.995979] env[61978]: INFO nova.virt.vmwareapi.vmops [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Created linked-clone VM from snapshot [ 963.996966] env[61978]: DEBUG nova.network.neutron [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Successfully created port: 461cf97f-d4c1-4a04-bc0f-ea10c52ecce3 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 964.002546] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ee2a74-c45a-407e-a3c1-7329ba39def0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.017943] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Uploading image e2653038-5e10-426c-af6a-90f08b37f43d {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 964.029708] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.058607] env[61978]: DEBUG oslo_vmware.rw_handles [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 964.058607] env[61978]: value = "vm-295868" [ 964.058607] env[61978]: _type = "VirtualMachine" [ 964.058607] env[61978]: }. 
{{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 964.059551] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-5febe67d-5a23-4b93-a8ac-c9faf245db72 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.070892] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.073087] env[61978]: DEBUG oslo_vmware.rw_handles [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Lease: (returnval){ [ 964.073087] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f3e2b3-e57d-e8cd-1366-01b9451f4afc" [ 964.073087] env[61978]: _type = "HttpNfcLease" [ 964.073087] env[61978]: } obtained for exporting VM: (result){ [ 964.073087] env[61978]: value = "vm-295868" [ 964.073087] env[61978]: _type = "VirtualMachine" [ 964.073087] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 964.073475] env[61978]: DEBUG oslo_vmware.api [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for the lease: (returnval){ [ 964.073475] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f3e2b3-e57d-e8cd-1366-01b9451f4afc" [ 964.073475] env[61978]: _type = "HttpNfcLease" [ 964.073475] env[61978]: } to be ready. {{(pid=61978) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 964.084450] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 964.084450] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f3e2b3-e57d-e8cd-1366-01b9451f4afc" [ 964.084450] env[61978]: _type = "HttpNfcLease" [ 964.084450] env[61978]: } is initializing. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 964.222874] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394908, 'name': Rename_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.302276] env[61978]: DEBUG nova.network.neutron [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Updating instance_info_cache with network_info: [{"id": "b04a501f-29a8-442a-9f2c-dddd76f5e335", "address": "fa:16:3e:62:14:fb", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb04a501f-29", "ovs_interfaceid": "b04a501f-29a8-442a-9f2c-dddd76f5e335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.595161] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 964.595161] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f3e2b3-e57d-e8cd-1366-01b9451f4afc" [ 964.595161] env[61978]: _type = "HttpNfcLease" [ 964.595161] env[61978]: } is ready. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 964.595161] env[61978]: DEBUG oslo_vmware.rw_handles [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 964.595161] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f3e2b3-e57d-e8cd-1366-01b9451f4afc" [ 964.595161] env[61978]: _type = "HttpNfcLease" [ 964.595161] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 964.595161] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a00f411-aeb4-4180-9ece-7c24ba94537b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.607504] env[61978]: DEBUG oslo_vmware.rw_handles [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a550c3-05aa-317d-e5d8-cebc30d028bb/disk-0.vmdk from lease info. 
{{(pid=61978) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 964.608026] env[61978]: DEBUG oslo_vmware.rw_handles [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a550c3-05aa-317d-e5d8-cebc30d028bb/disk-0.vmdk for reading. {{(pid=61978) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 964.612169] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34cfe66-ab71-4b5d-b44a-156cd96d43b9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.683541] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc235fb-253d-42ff-911b-eed9b8d3414f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.721516] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623f725e-7f6c-480b-b36c-b10f161d49bb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.732536] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394908, 'name': Rename_Task, 'duration_secs': 0.869305} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.733870] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-836a9710-d995-48b6-9fc5-e1b26e78854b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.738124] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 964.740038] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-96b4cc31-1289-4d09-a312-33bbc8c88549 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.743029] env[61978]: DEBUG nova.compute.manager [req-b70e4062-ec5f-46a3-9f90-fbd12ae63c96 req-11256176-2f81-4a35-959f-20e6d9f70ab6 service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Received event network-changed-b04a501f-29a8-442a-9f2c-dddd76f5e335 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 964.743029] env[61978]: DEBUG nova.compute.manager [req-b70e4062-ec5f-46a3-9f90-fbd12ae63c96 req-11256176-2f81-4a35-959f-20e6d9f70ab6 service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Refreshing instance network info cache due to event network-changed-b04a501f-29a8-442a-9f2c-dddd76f5e335. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 964.743299] env[61978]: DEBUG oslo_concurrency.lockutils [req-b70e4062-ec5f-46a3-9f90-fbd12ae63c96 req-11256176-2f81-4a35-959f-20e6d9f70ab6 service nova] Acquiring lock "refresh_cache-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.758522] env[61978]: DEBUG nova.compute.provider_tree [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 964.764022] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 964.764022] env[61978]: value = "task-1394910" [ 964.764022] env[61978]: _type = "Task" [ 964.764022] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.768550] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-eb317ef7-0893-433e-8e12-9ea81fe34b22 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.780194] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394910, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.810144] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "refresh_cache-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.810144] env[61978]: DEBUG nova.compute.manager [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Instance network_info: |[{"id": "b04a501f-29a8-442a-9f2c-dddd76f5e335", "address": "fa:16:3e:62:14:fb", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb04a501f-29", "ovs_interfaceid": "b04a501f-29a8-442a-9f2c-dddd76f5e335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 964.810399] env[61978]: DEBUG oslo_concurrency.lockutils [req-b70e4062-ec5f-46a3-9f90-fbd12ae63c96 req-11256176-2f81-4a35-959f-20e6d9f70ab6 service nova] Acquired lock "refresh_cache-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.810675] env[61978]: DEBUG nova.network.neutron [req-b70e4062-ec5f-46a3-9f90-fbd12ae63c96 req-11256176-2f81-4a35-959f-20e6d9f70ab6 service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Refreshing network info cache for port b04a501f-29a8-442a-9f2c-dddd76f5e335 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 964.812042] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:14:fb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b04a501f-29a8-442a-9f2c-dddd76f5e335', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 964.820785] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 
tempest-AttachInterfacesTestJSON-408106683-project-member] Creating folder: Project (e3a4f29a959447159b2f7d194ea94873). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 964.821858] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-850e7388-d7e7-42e7-ac1b-354112279546 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.835817] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Created folder: Project (e3a4f29a959447159b2f7d194ea94873) in parent group-v295764. [ 964.836144] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Creating folder: Instances. Parent ref: group-v295869. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 964.836753] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c798c0da-b523-4cc4-96e3-6fd218beb251 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.851060] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Created folder: Instances in parent group-v295869. [ 964.851397] env[61978]: DEBUG oslo.service.loopingcall [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 964.851610] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 964.851841] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-33b8aebe-ec3f-4a61-93e4-9a6090678e84 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.876356] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 964.876356] env[61978]: value = "task-1394913" [ 964.876356] env[61978]: _type = "Task" [ 964.876356] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.887215] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394913, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.900756] env[61978]: DEBUG nova.compute.manager [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 965.046383] env[61978]: DEBUG nova.virt.hardware [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 965.047139] env[61978]: DEBUG nova.virt.hardware [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 965.048150] env[61978]: DEBUG nova.virt.hardware [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 965.048735] env[61978]: DEBUG nova.virt.hardware [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 965.048735] env[61978]: DEBUG nova.virt.hardware [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 965.048864] env[61978]: DEBUG nova.virt.hardware [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 965.049764] env[61978]: DEBUG nova.virt.hardware [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 965.049764] env[61978]: DEBUG nova.virt.hardware [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 965.049764] env[61978]: DEBUG nova.virt.hardware [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 965.049764] env[61978]: DEBUG nova.virt.hardware [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 965.049930] env[61978]: DEBUG nova.virt.hardware [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 965.050773] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f9d2ab1-00da-4a15-9b64-4248515f1586 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.061492] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d803c99-6ddb-46b7-98ba-664bbae66def {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.265724] env[61978]: DEBUG nova.scheduler.client.report [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 965.286930] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394910, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.390037] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394913, 'name': CreateVM_Task, 'duration_secs': 0.435529} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.393140] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 965.394275] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.394801] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.395309] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 965.396272] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc98aee9-7547-450a-aae9-0c61aeaebc1a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.405953] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 965.405953] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]521b1992-6e11-9530-bbd3-88383dcea3f6" [ 965.405953] env[61978]: _type = "Task" [ 965.405953] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.419672] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]521b1992-6e11-9530-bbd3-88383dcea3f6, 'name': SearchDatastore_Task, 'duration_secs': 0.013563} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.422602] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.423187] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 965.423433] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.423656] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.423902] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 965.424344] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d40631c3-ccf1-4114-ae63-ea403d555c8b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.436817] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 965.436966] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 965.437918] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b200af8-5af5-428a-a1ef-e9fbd88dede0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.445429] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 965.445429] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e5be70-2283-5b6e-f731-48f3f7653937" [ 965.445429] env[61978]: _type = "Task" [ 965.445429] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.461125] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e5be70-2283-5b6e-f731-48f3f7653937, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.634813] env[61978]: DEBUG nova.network.neutron [req-b70e4062-ec5f-46a3-9f90-fbd12ae63c96 req-11256176-2f81-4a35-959f-20e6d9f70ab6 service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Updated VIF entry in instance network info cache for port b04a501f-29a8-442a-9f2c-dddd76f5e335. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 965.635754] env[61978]: DEBUG nova.network.neutron [req-b70e4062-ec5f-46a3-9f90-fbd12ae63c96 req-11256176-2f81-4a35-959f-20e6d9f70ab6 service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Updating instance_info_cache with network_info: [{"id": "b04a501f-29a8-442a-9f2c-dddd76f5e335", "address": "fa:16:3e:62:14:fb", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb04a501f-29", "ovs_interfaceid": "b04a501f-29a8-442a-9f2c-dddd76f5e335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.774845] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.912s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.775264] env[61978]: DEBUG nova.compute.manager [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 965.783021] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.402s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.783021] env[61978]: DEBUG nova.objects.instance [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Lazy-loading 'resources' on Instance uuid b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 965.795424] env[61978]: DEBUG oslo_vmware.api [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394910, 'name': PowerOnVM_Task, 'duration_secs': 0.802253} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.796124] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 965.796448] env[61978]: INFO nova.compute.manager [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Took 8.54 seconds to spawn the instance on the hypervisor. 
[ 965.796827] env[61978]: DEBUG nova.compute.manager [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 965.797716] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-579edf97-4868-437a-a32b-984f3259f308 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.956839] env[61978]: DEBUG nova.network.neutron [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Successfully updated port: 461cf97f-d4c1-4a04-bc0f-ea10c52ecce3 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 965.965813] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e5be70-2283-5b6e-f731-48f3f7653937, 'name': SearchDatastore_Task, 'duration_secs': 0.013237} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.968025] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b24649f-e052-4d0d-a773-060d3c2e8e6e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.976875] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 965.976875] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]522e9a51-e7e4-6e04-b65d-b03966e0464c" [ 965.976875] env[61978]: _type = "Task" [ 965.976875] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.987415] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522e9a51-e7e4-6e04-b65d-b03966e0464c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.108436] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Acquiring lock "aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.108749] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Lock "aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.138857] env[61978]: DEBUG oslo_concurrency.lockutils [req-b70e4062-ec5f-46a3-9f90-fbd12ae63c96 req-11256176-2f81-4a35-959f-20e6d9f70ab6 service nova] Releasing lock "refresh_cache-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.288745] env[61978]: DEBUG nova.compute.utils [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 966.290317] env[61978]: DEBUG nova.compute.manager [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 966.290491] env[61978]: DEBUG nova.network.neutron [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 966.326802] env[61978]: INFO nova.compute.manager [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Took 38.08 seconds to build instance. 
[ 966.350471] env[61978]: DEBUG nova.policy [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '031a98d4e0f345c28fd226142db35516', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86f4ae0b29af4ee2b33e5a499cf1e899', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 966.460156] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "refresh_cache-8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.461702] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquired lock "refresh_cache-8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.461702] env[61978]: DEBUG nova.network.neutron [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 966.493640] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522e9a51-e7e4-6e04-b65d-b03966e0464c, 'name': SearchDatastore_Task, 'duration_secs': 0.012266} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.494244] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.494803] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0/9bee3e66-93b5-4c0f-bb46-8fbd78c312c0.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 966.495929] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4a6512b1-2356-41d8-ae14-8586f484007f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.514351] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 966.514351] env[61978]: value = "task-1394914" [ 966.514351] env[61978]: _type = "Task" [ 966.514351] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.525622] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1394914, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.659829] env[61978]: DEBUG nova.network.neutron [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Successfully created port: 4c3e3550-3780-4cf8-b191-9a82b2f340f2 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 966.797133] env[61978]: DEBUG nova.compute.manager [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 966.830433] env[61978]: DEBUG oslo_concurrency.lockutils [None req-45039e89-13e9-4369-b213-bd3293585589 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "8a21e6a7-c34e-4af0-b1fd-8a501694614c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.551s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.844585] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02e97f7b-4aae-42c7-b432-d9bcd31218be {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.854052] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647dc1ac-4ef2-4241-8be0-d8c215596659 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.899971] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a1e314-6b02-4dc8-ad3a-1155001725a5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.909914] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebe276d-42b1-448a-bf80-1812154bbdc4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.927155] env[61978]: DEBUG nova.compute.provider_tree [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 967.013728] env[61978]: DEBUG nova.network.neutron [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 967.028060] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1394914, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.167459] env[61978]: DEBUG nova.network.neutron [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Updating instance_info_cache with network_info: [{"id": "461cf97f-d4c1-4a04-bc0f-ea10c52ecce3", "address": "fa:16:3e:33:87:13", "network": {"id": "3e60c5f0-d590-4a22-a8bf-c0356ac4deb4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1865077723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86f4ae0b29af4ee2b33e5a499cf1e899", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap461cf97f-d4", "ovs_interfaceid": "461cf97f-d4c1-4a04-bc0f-ea10c52ecce3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.333468] env[61978]: DEBUG nova.compute.manager [None req-311e7218-989f-4716-82bb-d0d4160fe09f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 23e2a460-a59f-46b8-bd29-153d6d6a5c92] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 967.430545] env[61978]: DEBUG nova.scheduler.client.report [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 967.526971] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1394914, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.617269} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.527310] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0/9bee3e66-93b5-4c0f-bb46-8fbd78c312c0.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 967.527542] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 967.527807] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ba80a9f6-875e-427a-98d5-cec844c79824 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.544160] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 967.544160] env[61978]: value = "task-1394915" [ 967.544160] env[61978]: _type = "Task" [ 967.544160] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.555393] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1394915, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.671272] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Releasing lock "refresh_cache-8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 967.671557] env[61978]: DEBUG nova.compute.manager [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Instance network_info: |[{"id": "461cf97f-d4c1-4a04-bc0f-ea10c52ecce3", "address": "fa:16:3e:33:87:13", "network": {"id": "3e60c5f0-d590-4a22-a8bf-c0356ac4deb4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1865077723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86f4ae0b29af4ee2b33e5a499cf1e899", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap461cf97f-d4", "ovs_interfaceid": "461cf97f-d4c1-4a04-bc0f-ea10c52ecce3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 967.672135] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:87:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '461cf97f-d4c1-4a04-bc0f-ea10c52ecce3', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 967.680034] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Creating folder: Project (86f4ae0b29af4ee2b33e5a499cf1e899). Parent ref: group-v295764. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 967.680357] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f50168b-1d13-46ec-9f1c-e263c8ade189 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.694668] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Created folder: Project (86f4ae0b29af4ee2b33e5a499cf1e899) in parent group-v295764. [ 967.694943] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Creating folder: Instances. Parent ref: group-v295872. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 967.695233] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4662eae-505f-4fb9-9395-2638b465cc82 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.708964] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Created folder: Instances in parent group-v295872. [ 967.709268] env[61978]: DEBUG oslo.service.loopingcall [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 967.709469] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 967.709709] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bedcc3be-1cdb-41ac-9dd9-a23ddbdbf0ff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.733051] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 967.733051] env[61978]: value = "task-1394918" [ 967.733051] env[61978]: _type = "Task" [ 967.733051] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.742513] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394918, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.808151] env[61978]: DEBUG nova.compute.manager [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 967.843057] env[61978]: DEBUG nova.compute.manager [None req-311e7218-989f-4716-82bb-d0d4160fe09f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 23e2a460-a59f-46b8-bd29-153d6d6a5c92] Instance disappeared before build. {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 967.936832] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.157s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.939340] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 29.622s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.939527] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.939682] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 967.940517] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.485s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.940753] env[61978]: DEBUG nova.objects.instance [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Lazy-loading 'resources' on Instance uuid 78b78ae7-74fe-4403-be9b-229abe6a7353 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 967.942987] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e4ef09-8128-4167-82ee-86ef69a1793f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.953138] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f1381b5-a031-4046-bbde-1b2dbc7d89dc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.971258] env[61978]: INFO nova.scheduler.client.report [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Deleted allocations for instance 
b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14 [ 967.975167] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ada35d9-32a5-48ba-914c-bd3d9b06c8d0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.984258] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb93b022-b2af-4bfd-82fe-0e96fa6e20d7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.016226] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178998MB free_disk=184GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 968.016724] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.054865] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1394915, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.099113} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.055184] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 968.056089] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef99f16-bc0e-4247-8489-23125c3f36ed {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.081165] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0/9bee3e66-93b5-4c0f-bb46-8fbd78c312c0.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 968.081550] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab10a4f4-c3eb-423a-82ca-0a061fd617c0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.104937] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 968.104937] env[61978]: value = "task-1394919" [ 968.104937] env[61978]: _type = "Task" [ 968.104937] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.114943] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1394919, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.248410] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394918, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.360894] env[61978]: DEBUG oslo_concurrency.lockutils [None req-311e7218-989f-4716-82bb-d0d4160fe09f tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "23e2a460-a59f-46b8-bd29-153d6d6a5c92" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.631s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.478587] env[61978]: DEBUG nova.network.neutron [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Successfully updated port: 4c3e3550-3780-4cf8-b191-9a82b2f340f2 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 968.483938] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1ce8d02-8096-44bc-a89b-076d84ac236a tempest-ServersNegativeTestMultiTenantJSON-2084340310 tempest-ServersNegativeTestMultiTenantJSON-2084340310-project-member] Lock "b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.053s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.617714] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1394919, 'name': ReconfigVM_Task, 'duration_secs': 0.411189} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.620367] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Reconfigured VM instance instance-00000026 to attach disk [datastore2] 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0/9bee3e66-93b5-4c0f-bb46-8fbd78c312c0.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 968.621193] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-459a177b-30d1-41db-a565-801b605c12e2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.629644] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 968.629644] env[61978]: value = "task-1394920" [ 968.629644] env[61978]: _type = "Task" [ 968.629644] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.642074] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1394920, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.746793] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394918, 'name': CreateVM_Task, 'duration_secs': 0.577266} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.746959] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 968.747693] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.747871] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.748226] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 968.748499] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a63b4ed9-aa9c-4342-b776-4a9b6381fe50 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.756996] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 968.756996] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e05518-942c-95c8-44ff-f3b800e5d4ec" [ 968.756996] env[61978]: _type = "Task" [ 968.756996] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.770583] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e05518-942c-95c8-44ff-f3b800e5d4ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.864130] env[61978]: DEBUG nova.compute.manager [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 968.888761] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a15419-1ae7-4728-bbcd-2c68ebd4ea4a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.905156] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d884df8-2b7b-4ed2-84cb-f80061084c53 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.934749] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c01049ee-191b-4c28-939c-699cb554c6c2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.946357] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae3b6c5c-0082-4f03-9aa0-7a4f3cd12daa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.962931] env[61978]: DEBUG nova.compute.provider_tree [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 968.984884] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "refresh_cache-bdfdd685-e440-4f53-b6c4-2ee2f06acba8" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.985118] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquired lock "refresh_cache-bdfdd685-e440-4f53-b6c4-2ee2f06acba8" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.985212] env[61978]: DEBUG nova.network.neutron [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 969.140883] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1394920, 'name': Rename_Task, 'duration_secs': 0.182186} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.141219] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 969.141489] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4316a950-6f10-4949-8ac6-87485b1c3dd7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.149444] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 969.149444] env[61978]: value = "task-1394921" [ 969.149444] env[61978]: _type = "Task" [ 969.149444] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.158582] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1394921, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.268894] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e05518-942c-95c8-44ff-f3b800e5d4ec, 'name': SearchDatastore_Task, 'duration_secs': 0.015196} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.269259] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.269491] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 969.269772] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 969.269932] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.270149] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 969.270449] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-32def8fa-68c4-40a3-bf8d-1c0523a3d098 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.281218] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 969.281218] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 969.282140] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d2169b8-e34d-4853-bb93-5d438150387a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.289047] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 969.289047] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]525e4cc4-fd58-a5d7-b691-38c918f2d5b2" [ 969.289047] env[61978]: _type = "Task" [ 969.289047] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.300067] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]525e4cc4-fd58-a5d7-b691-38c918f2d5b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.389807] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.490820] env[61978]: ERROR nova.scheduler.client.report [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] [req-391d05f6-3e0a-4cda-9a20-492ea2e90963] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 44209228-3464-48ae-bc40-83eccd44b0cf. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-391d05f6-3e0a-4cda-9a20-492ea2e90963"}]} [ 969.508852] env[61978]: DEBUG nova.scheduler.client.report [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Refreshing inventories for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 969.520197] env[61978]: DEBUG nova.network.neutron [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 969.524941] env[61978]: DEBUG nova.scheduler.client.report [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Updating ProviderTree inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 969.525288] env[61978]: DEBUG nova.compute.provider_tree [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 969.539035] env[61978]: DEBUG nova.scheduler.client.report [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Refreshing aggregate associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, aggregates: None {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 969.563134] env[61978]: DEBUG nova.scheduler.client.report [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Refreshing trait associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 969.665039] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1394921, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.712135] env[61978]: DEBUG nova.network.neutron [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Updating instance_info_cache with network_info: [{"id": "4c3e3550-3780-4cf8-b191-9a82b2f340f2", "address": "fa:16:3e:4f:d0:71", "network": {"id": "3e60c5f0-d590-4a22-a8bf-c0356ac4deb4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1865077723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86f4ae0b29af4ee2b33e5a499cf1e899", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c3e3550-37", "ovs_interfaceid": "4c3e3550-3780-4cf8-b191-9a82b2f340f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.805096] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]525e4cc4-fd58-a5d7-b691-38c918f2d5b2, 'name': SearchDatastore_Task, 'duration_secs': 0.012222} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.806130] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fef376c9-bdba-49ee-bcba-0075d820a979 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.813534] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 969.813534] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]521cd4af-ce97-c659-50ed-36bbd50e2cec" [ 969.813534] env[61978]: _type = "Task" [ 969.813534] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.830215] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]521cd4af-ce97-c659-50ed-36bbd50e2cec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.040919] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-391c8628-f74b-4092-b494-5b915096e826 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.049964] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3981332-d90f-4475-9681-0d3b1b4b98ab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.082704] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b31812-6686-4ea1-9999-d36c5c891019 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.092150] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7fe960c-4a54-4113-baf8-8cec24d34c94 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.106924] env[61978]: DEBUG nova.compute.provider_tree [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 970.160879] env[61978]: DEBUG oslo_vmware.api [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1394921, 'name': PowerOnVM_Task, 'duration_secs': 0.554546} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.161201] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 970.161399] env[61978]: INFO nova.compute.manager [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Took 8.01 seconds to spawn the instance on the hypervisor. 
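Every vCenter operation above (SearchDatastore_Task, CopyVirtualDisk_Task, CreateVM_Task, Rename_Task, PowerOnVM_Task) follows the same shape: a 'Waiting for the task: (returnval){ value = "task-..." }' entry, periodic 'progress is N%' polls, and a final 'completed successfully' line with duration_secs. A rough, self-contained sketch of that poll-until-done loop; TaskInfo and the fetch callable here are illustrative stand-ins for the vSphere TaskInfo lookup that oslo.vmware performs, not its actual API:

    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str            # "running", "success" or "error"
        progress: int         # 0..100
        result: object = None
        error: str = ""

    def wait_for_task(fetch_task_info, poll_interval=0.5):
        # Poll until the task either succeeds or fails, mirroring the
        # "progress is N%" / "completed successfully" entries above.
        while True:
            info = fetch_task_info()
            if info.state == "success":
                return info.result
            if info.state == "error":
                raise RuntimeError(info.error)
            print(f"progress is {info.progress}%")
            time.sleep(poll_interval)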
[ 970.161576] env[61978]: DEBUG nova.compute.manager [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 970.162427] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72106cc0-c7c4-431c-ad97-4fb245e055cf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.218678] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Releasing lock "refresh_cache-bdfdd685-e440-4f53-b6c4-2ee2f06acba8" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.219104] env[61978]: DEBUG nova.compute.manager [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Instance network_info: |[{"id": "4c3e3550-3780-4cf8-b191-9a82b2f340f2", "address": "fa:16:3e:4f:d0:71", "network": {"id": "3e60c5f0-d590-4a22-a8bf-c0356ac4deb4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1865077723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86f4ae0b29af4ee2b33e5a499cf1e899", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c3e3550-37", "ovs_interfaceid": "4c3e3550-3780-4cf8-b191-9a82b2f340f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 970.330313] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]521cd4af-ce97-c659-50ed-36bbd50e2cec, 'name': SearchDatastore_Task, 'duration_secs': 0.018551} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.330820] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.331267] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7/8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 970.331673] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1e9aede9-c49d-4f1d-a90b-13f3ddbe50c0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.342996] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 970.342996] env[61978]: value = "task-1394922" [ 970.342996] env[61978]: _type = "Task" [ 970.342996] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.356928] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394922, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.641508] env[61978]: DEBUG nova.scheduler.client.report [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Updated inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf with generation 59 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 970.641802] env[61978]: DEBUG nova.compute.provider_tree [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Updating resource provider 44209228-3464-48ae-bc40-83eccd44b0cf generation from 59 to 60 during operation: update_inventory {{(pid=61978) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 970.642031] env[61978]: DEBUG nova.compute.provider_tree [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 970.681754] env[61978]: INFO nova.compute.manager [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Took 41.33 seconds to build instance. [ 970.857144] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394922, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.928075] env[61978]: DEBUG nova.virt.hardware [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 970.928364] env[61978]: DEBUG nova.virt.hardware [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 970.928528] env[61978]: DEBUG nova.virt.hardware [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 970.928715] env[61978]: DEBUG nova.virt.hardware [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 970.929062] env[61978]: DEBUG nova.virt.hardware [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 970.929370] env[61978]: DEBUG nova.virt.hardware [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 970.929664] env[61978]: DEBUG nova.virt.hardware [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 970.929863] env[61978]: DEBUG nova.virt.hardware [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 970.930068] env[61978]: DEBUG nova.virt.hardware [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 970.930248] env[61978]: DEBUG nova.virt.hardware [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 970.930431] env[61978]: DEBUG nova.virt.hardware [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 970.932861] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84c349db-f30b-4df6-993c-9b69f97970bb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.943801] env[61978]: DEBUG oslo_vmware.rw_handles [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527965c6-13c0-3726-4042-2cc153bda0b9/disk-0.vmdk. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 970.945305] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0adbfeb-5ac6-43e4-af2c-d1b7903d81b1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.950588] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7858c301-5a85-4d82-a42c-a3792e22d694 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.966575] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:d0:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4c3e3550-3780-4cf8-b191-9a82b2f340f2', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 970.975205] env[61978]: DEBUG oslo.service.loopingcall [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 970.977306] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 970.977616] env[61978]: DEBUG oslo_vmware.rw_handles [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527965c6-13c0-3726-4042-2cc153bda0b9/disk-0.vmdk is in state: ready. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 970.977801] env[61978]: ERROR oslo_vmware.rw_handles [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527965c6-13c0-3726-4042-2cc153bda0b9/disk-0.vmdk due to incomplete transfer. [ 970.978127] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fa2f13ac-cfdd-45ec-af24-d14ff1342ed2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.992909] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-022867ec-58b7-4a2c-87f1-46e0458c4e27 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.003486] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 971.003486] env[61978]: value = "task-1394923" [ 971.003486] env[61978]: _type = "Task" [ 971.003486] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.011217] env[61978]: DEBUG oslo_vmware.rw_handles [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527965c6-13c0-3726-4042-2cc153bda0b9/disk-0.vmdk. {{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 971.011523] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Uploaded image 7e38f3f1-4b65-4030-8b1a-af8f828b3d96 to the Glance image server {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 971.014597] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Destroying the VM {{(pid=61978) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 971.015091] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-31bd820f-d284-4532-b236-8bffe6ba585e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.024343] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394923, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.026103] env[61978]: DEBUG oslo_vmware.api [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 971.026103] env[61978]: value = "task-1394924" [ 971.026103] env[61978]: _type = "Task" [ 971.026103] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.040093] env[61978]: DEBUG oslo_vmware.api [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394924, 'name': Destroy_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.147577] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.207s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.150364] env[61978]: DEBUG oslo_concurrency.lockutils [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.288s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.150936] env[61978]: DEBUG nova.objects.instance [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Lazy-loading 'resources' on Instance uuid e9e2deb5-5bf9-4b57-832f-9928d3cda162 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 971.183102] env[61978]: INFO nova.scheduler.client.report [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Deleted allocations for instance 78b78ae7-74fe-4403-be9b-229abe6a7353 [ 971.184275] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3e4fb442-2bb0-4825-a498-d1ec6e284c59 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.861s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.354769] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394922, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.707752} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.355129] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7/8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 971.355287] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 971.355832] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-08626d7b-5762-4fc5-8db1-d59bb73dbb4c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.364123] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 971.364123] env[61978]: value = "task-1394925" [ 971.364123] env[61978]: _type = "Task" [ 971.364123] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.375493] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394925, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.521398] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fb5ccbef-e3a5-488b-9a48-704e85a042ee tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.522374] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fb5ccbef-e3a5-488b-9a48-704e85a042ee tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.002s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.522971] env[61978]: DEBUG nova.compute.manager [None req-fb5ccbef-e3a5-488b-9a48-704e85a042ee tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 971.523635] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394923, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.524969] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-945dd795-05b7-4ac8-90dd-ebe3eba86e11 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.540735] env[61978]: DEBUG nova.compute.manager [None req-fb5ccbef-e3a5-488b-9a48-704e85a042ee tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61978) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 971.541720] env[61978]: DEBUG nova.objects.instance [None req-fb5ccbef-e3a5-488b-9a48-704e85a042ee tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lazy-loading 'flavor' on Instance uuid b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 971.549139] env[61978]: DEBUG oslo_vmware.api [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394924, 'name': Destroy_Task} progress is 33%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.550118] env[61978]: DEBUG nova.compute.manager [req-918fe377-f51f-47db-8262-da6ae2cb340a req-d4e3ec16-4762-4525-9be7-5c1d56c157fc service nova] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Received event network-vif-plugged-461cf97f-d4c1-4a04-bc0f-ea10c52ecce3 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 971.550547] env[61978]: DEBUG oslo_concurrency.lockutils [req-918fe377-f51f-47db-8262-da6ae2cb340a req-d4e3ec16-4762-4525-9be7-5c1d56c157fc service nova] Acquiring lock "8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.550580] env[61978]: DEBUG oslo_concurrency.lockutils [req-918fe377-f51f-47db-8262-da6ae2cb340a req-d4e3ec16-4762-4525-9be7-5c1d56c157fc service nova] Lock "8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.550745] env[61978]: DEBUG oslo_concurrency.lockutils [req-918fe377-f51f-47db-8262-da6ae2cb340a req-d4e3ec16-4762-4525-9be7-5c1d56c157fc service nova] Lock "8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.550919] env[61978]: DEBUG nova.compute.manager [req-918fe377-f51f-47db-8262-da6ae2cb340a req-d4e3ec16-4762-4525-9be7-5c1d56c157fc service nova] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] No waiting events found dispatching network-vif-plugged-461cf97f-d4c1-4a04-bc0f-ea10c52ecce3 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 971.551103] env[61978]: WARNING nova.compute.manager 
[req-918fe377-f51f-47db-8262-da6ae2cb340a req-d4e3ec16-4762-4525-9be7-5c1d56c157fc service nova] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Received unexpected event network-vif-plugged-461cf97f-d4c1-4a04-bc0f-ea10c52ecce3 for instance with vm_state building and task_state spawning. [ 971.551336] env[61978]: DEBUG nova.compute.manager [req-918fe377-f51f-47db-8262-da6ae2cb340a req-d4e3ec16-4762-4525-9be7-5c1d56c157fc service nova] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Received event network-changed-461cf97f-d4c1-4a04-bc0f-ea10c52ecce3 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 971.551440] env[61978]: DEBUG nova.compute.manager [req-918fe377-f51f-47db-8262-da6ae2cb340a req-d4e3ec16-4762-4525-9be7-5c1d56c157fc service nova] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Refreshing instance network info cache due to event network-changed-461cf97f-d4c1-4a04-bc0f-ea10c52ecce3. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 971.551627] env[61978]: DEBUG oslo_concurrency.lockutils [req-918fe377-f51f-47db-8262-da6ae2cb340a req-d4e3ec16-4762-4525-9be7-5c1d56c157fc service nova] Acquiring lock "refresh_cache-8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.551767] env[61978]: DEBUG oslo_concurrency.lockutils [req-918fe377-f51f-47db-8262-da6ae2cb340a req-d4e3ec16-4762-4525-9be7-5c1d56c157fc service nova] Acquired lock "refresh_cache-8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.551928] env[61978]: DEBUG nova.network.neutron [req-918fe377-f51f-47db-8262-da6ae2cb340a req-d4e3ec16-4762-4525-9be7-5c1d56c157fc service nova] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Refreshing network info cache for port 461cf97f-d4c1-4a04-bc0f-ea10c52ecce3 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 971.691588] env[61978]: DEBUG nova.compute.manager [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 971.696698] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8d7502c6-e2ab-4a2b-9396-0fb16648f179 tempest-InstanceActionsTestJSON-1122201354 tempest-InstanceActionsTestJSON-1122201354-project-member] Lock "78b78ae7-74fe-4403-be9b-229abe6a7353" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.291s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.882927] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394925, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.291146} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.883896] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 971.885712] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757b0cf4-b7b1-4f38-bd95-3d105a821029 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.919568] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7/8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 971.924867] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7abe118-14aa-4265-8ed1-d587cec0a86e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.952017] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 971.952017] env[61978]: value = "task-1394926" [ 971.952017] env[61978]: _type = "Task" [ 971.952017] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.959396] env[61978]: DEBUG oslo_concurrency.lockutils [None req-35d37338-0cdc-4cfa-a7c9-76b20f8c757b tempest-ServersAdminTestJSON-1328224167 tempest-ServersAdminTestJSON-1328224167-project-admin] Acquiring lock "refresh_cache-8a21e6a7-c34e-4af0-b1fd-8a501694614c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.959396] env[61978]: DEBUG oslo_concurrency.lockutils [None req-35d37338-0cdc-4cfa-a7c9-76b20f8c757b tempest-ServersAdminTestJSON-1328224167 tempest-ServersAdminTestJSON-1328224167-project-admin] Acquired lock "refresh_cache-8a21e6a7-c34e-4af0-b1fd-8a501694614c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.959396] env[61978]: DEBUG nova.network.neutron [None req-35d37338-0cdc-4cfa-a7c9-76b20f8c757b tempest-ServersAdminTestJSON-1328224167 tempest-ServersAdminTestJSON-1328224167-project-admin] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 971.970543] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394926, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.020133] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394923, 'name': CreateVM_Task, 'duration_secs': 0.852249} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.024139] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 972.024371] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.024514] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.025200] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 972.025297] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b375e4e9-33d8-4d28-b0d8-7a30718a419d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.035805] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 972.035805] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5233d14e-9d00-8ee5-6473-ed55ea196a69" [ 972.035805] env[61978]: _type = "Task" [ 972.035805] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.046318] env[61978]: DEBUG oslo_vmware.api [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394924, 'name': Destroy_Task, 'duration_secs': 0.939737} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.048032] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Destroyed the VM [ 972.048032] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Deleting Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 972.050901] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-36df8775-9620-46bd-a2ba-47f068c82dc4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.053506] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb5ccbef-e3a5-488b-9a48-704e85a042ee tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 972.059598] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab39fb44-6fb1-46cf-b00f-63c8279d0046 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.063246] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5233d14e-9d00-8ee5-6473-ed55ea196a69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.070138] env[61978]: DEBUG oslo_vmware.api [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 972.070138] env[61978]: value = "task-1394927" [ 972.070138] env[61978]: _type = "Task" [ 972.070138] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.074833] env[61978]: DEBUG oslo_vmware.api [None req-fb5ccbef-e3a5-488b-9a48-704e85a042ee tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 972.074833] env[61978]: value = "task-1394928" [ 972.074833] env[61978]: _type = "Task" [ 972.074833] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.088903] env[61978]: DEBUG oslo_vmware.api [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394927, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.093011] env[61978]: DEBUG oslo_vmware.api [None req-fb5ccbef-e3a5-488b-9a48-704e85a042ee tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394928, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.222143] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.252416] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquiring lock "371ddf66-a39b-41c4-bbd1-2a1c1b99834e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.252827] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Lock "371ddf66-a39b-41c4-bbd1-2a1c1b99834e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.253104] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquiring lock "371ddf66-a39b-41c4-bbd1-2a1c1b99834e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.253304] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Lock "371ddf66-a39b-41c4-bbd1-2a1c1b99834e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.253480] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Lock "371ddf66-a39b-41c4-bbd1-2a1c1b99834e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.258817] env[61978]: INFO nova.compute.manager [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 
371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Terminating instance [ 972.263642] env[61978]: DEBUG nova.compute.manager [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 972.263876] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 972.266095] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd5caf05-4730-4279-b947-52b2dd5345dc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.276718] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 972.277049] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2992f217-d5e8-46be-8ee8-8ed803204bb4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.285576] env[61978]: DEBUG oslo_vmware.api [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 972.285576] env[61978]: value = "task-1394929" [ 972.285576] env[61978]: _type = "Task" [ 972.285576] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.304502] env[61978]: DEBUG oslo_vmware.api [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1394929, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.317648] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9599a8c1-8d7b-4ce6-a1d6-f18e2b8e6f72 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.326743] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-796dad27-903e-4dbd-a497-ce483f2a99cb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.363553] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d193ff-f5e7-44dd-aa9e-e8140de52334 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.376095] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426282f2-41c8-4502-b388-b61c5e3b81c6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.392673] env[61978]: DEBUG nova.compute.provider_tree [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 972.463388] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394926, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.552966] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5233d14e-9d00-8ee5-6473-ed55ea196a69, 'name': SearchDatastore_Task, 'duration_secs': 0.023459} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.552966] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.552966] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 972.553462] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.553462] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.553462] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 972.559193] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb69bd58-4859-4922-9be2-25818f01a098 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.569734] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 972.570059] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 972.571172] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42d73b2e-51b2-4cb3-817c-24c4bc4e7513 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.579454] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 972.579454] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]524faab6-fa2a-4600-d6e1-365fcdc16324" [ 972.579454] env[61978]: _type = "Task" [ 972.579454] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.590491] env[61978]: DEBUG oslo_vmware.api [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394927, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.601743] env[61978]: DEBUG oslo_vmware.api [None req-fb5ccbef-e3a5-488b-9a48-704e85a042ee tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394928, 'name': PowerOffVM_Task, 'duration_secs': 0.233401} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.601743] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]524faab6-fa2a-4600-d6e1-365fcdc16324, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.602054] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb5ccbef-e3a5-488b-9a48-704e85a042ee tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 972.602054] env[61978]: DEBUG nova.compute.manager [None req-fb5ccbef-e3a5-488b-9a48-704e85a042ee tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 972.603033] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ee6685c-075a-41ca-9d9e-d25b5cd26f89 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.777920] env[61978]: DEBUG nova.network.neutron [req-918fe377-f51f-47db-8262-da6ae2cb340a req-d4e3ec16-4762-4525-9be7-5c1d56c157fc service nova] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Updated VIF entry in instance network info cache for port 461cf97f-d4c1-4a04-bc0f-ea10c52ecce3. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 972.778918] env[61978]: DEBUG nova.network.neutron [req-918fe377-f51f-47db-8262-da6ae2cb340a req-d4e3ec16-4762-4525-9be7-5c1d56c157fc service nova] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Updating instance_info_cache with network_info: [{"id": "461cf97f-d4c1-4a04-bc0f-ea10c52ecce3", "address": "fa:16:3e:33:87:13", "network": {"id": "3e60c5f0-d590-4a22-a8bf-c0356ac4deb4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1865077723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86f4ae0b29af4ee2b33e5a499cf1e899", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap461cf97f-d4", "ovs_interfaceid": "461cf97f-d4c1-4a04-bc0f-ea10c52ecce3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.798653] env[61978]: DEBUG oslo_vmware.api [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1394929, 'name': PowerOffVM_Task, 'duration_secs': 0.469981} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.799057] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 972.799273] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 972.799574] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e3bbd16-90d4-4775-b76c-e5091c0b468b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.893907] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 972.894503] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 972.894629] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Deleting the datastore file [datastore2] 371ddf66-a39b-41c4-bbd1-2a1c1b99834e {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 972.898258] env[61978]: DEBUG nova.scheduler.client.report [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 972.903787] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5675442b-1f7a-4cef-8dde-e6baedfc561e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.925298] env[61978]: DEBUG oslo_vmware.api [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 
tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 972.925298] env[61978]: value = "task-1394931" [ 972.925298] env[61978]: _type = "Task" [ 972.925298] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.951146] env[61978]: DEBUG oslo_vmware.api [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1394931, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.969516] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394926, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.096151] env[61978]: DEBUG oslo_vmware.api [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394927, 'name': RemoveSnapshot_Task, 'duration_secs': 0.672255} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.096964] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Deleted Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 973.097559] env[61978]: INFO nova.compute.manager [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Took 15.97 seconds to snapshot the instance on the hypervisor. [ 973.109428] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]524faab6-fa2a-4600-d6e1-365fcdc16324, 'name': SearchDatastore_Task, 'duration_secs': 0.024752} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.115784] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d5c09eb-4407-4135-8cad-ed8eafa0eaed {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.119458] env[61978]: DEBUG nova.network.neutron [None req-35d37338-0cdc-4cfa-a7c9-76b20f8c757b tempest-ServersAdminTestJSON-1328224167 tempest-ServersAdminTestJSON-1328224167-project-admin] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Updating instance_info_cache with network_info: [{"id": "cc183679-2e0d-4d97-9429-82606794bea3", "address": "fa:16:3e:6d:c1:68", "network": {"id": "bc368623-cfb2-43fb-a7d6-8dd238bd961d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1360202280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d7394d965f94155a34dd0ecc0957649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc183679-2e", "ovs_interfaceid": "cc183679-2e0d-4d97-9429-82606794bea3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.126332] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fb5ccbef-e3a5-488b-9a48-704e85a042ee tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.603s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.129013] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 973.129013] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ecf113-c644-929b-7816-53e448efa174" [ 973.129013] env[61978]: _type = "Task" [ 973.129013] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.143387] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ecf113-c644-929b-7816-53e448efa174, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.282791] env[61978]: DEBUG oslo_concurrency.lockutils [req-918fe377-f51f-47db-8262-da6ae2cb340a req-d4e3ec16-4762-4525-9be7-5c1d56c157fc service nova] Releasing lock "refresh_cache-8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.415187] env[61978]: DEBUG oslo_concurrency.lockutils [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.263s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.416865] env[61978]: DEBUG oslo_concurrency.lockutils [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.342s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.418207] env[61978]: INFO nova.compute.claims [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 973.426045] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "f22e097d-f1a5-414a-82cc-ab455db876c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.426288] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "f22e097d-f1a5-414a-82cc-ab455db876c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.427051] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "f22e097d-f1a5-414a-82cc-ab455db876c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.427051] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "f22e097d-f1a5-414a-82cc-ab455db876c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.427051] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 
tempest-ImagesTestJSON-1872689461-project-member] Lock "f22e097d-f1a5-414a-82cc-ab455db876c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.430371] env[61978]: INFO nova.compute.manager [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Terminating instance [ 973.436752] env[61978]: DEBUG nova.compute.manager [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 973.438014] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 973.438413] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62508fc7-e8fc-4d40-a7c8-2b2f7926fa02 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.443588] env[61978]: INFO nova.scheduler.client.report [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Deleted allocations for instance e9e2deb5-5bf9-4b57-832f-9928d3cda162 [ 973.456952] env[61978]: DEBUG oslo_vmware.api [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1394931, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.226892} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.459846] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 973.461208] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 973.462309] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 973.462309] env[61978]: INFO nova.compute.manager [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Took 1.20 seconds to destroy the instance on the hypervisor. [ 973.463118] env[61978]: DEBUG oslo.service.loopingcall [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 973.469287] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 973.470635] env[61978]: DEBUG nova.compute.manager [-] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 973.470733] env[61978]: DEBUG nova.network.neutron [-] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 973.472889] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-adb5ce91-6c5c-4364-b1b0-4e6746127e1d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.488846] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394926, 'name': ReconfigVM_Task, 'duration_secs': 1.105805} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.488846] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Reconfigured VM instance instance-00000027 to attach disk [datastore2] 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7/8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 973.489049] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-13da1aac-a535-42f8-9fe8-2c26d4797385 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.500542] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 973.500542] env[61978]: value = "task-1394933" [ 973.500542] env[61978]: _type = "Task" [ 973.500542] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.515912] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394933, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.543697] env[61978]: DEBUG oslo_concurrency.lockutils [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquiring lock "4c7053ee-7c44-49ee-8d30-bf14686c6b1c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.544025] env[61978]: DEBUG oslo_concurrency.lockutils [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Lock "4c7053ee-7c44-49ee-8d30-bf14686c6b1c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.567189] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 973.567464] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 973.567650] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Deleting the datastore file [datastore2] f22e097d-f1a5-414a-82cc-ab455db876c7 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 973.568017] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-649a8850-fbb2-470c-a027-78c29665639c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.576142] env[61978]: DEBUG oslo_vmware.api [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 973.576142] env[61978]: value = "task-1394934" [ 973.576142] env[61978]: _type = "Task" [ 973.576142] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.589931] env[61978]: DEBUG oslo_vmware.api [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394934, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.613991] env[61978]: DEBUG nova.compute.manager [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Instance disappeared during snapshot {{(pid=61978) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4494}} [ 973.624139] env[61978]: DEBUG oslo_concurrency.lockutils [None req-35d37338-0cdc-4cfa-a7c9-76b20f8c757b tempest-ServersAdminTestJSON-1328224167 tempest-ServersAdminTestJSON-1328224167-project-admin] Releasing lock "refresh_cache-8a21e6a7-c34e-4af0-b1fd-8a501694614c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.624397] env[61978]: DEBUG nova.compute.manager [None req-35d37338-0cdc-4cfa-a7c9-76b20f8c757b tempest-ServersAdminTestJSON-1328224167 tempest-ServersAdminTestJSON-1328224167-project-admin] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Inject network info {{(pid=61978) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7219}} [ 973.624944] env[61978]: DEBUG nova.compute.manager [None req-35d37338-0cdc-4cfa-a7c9-76b20f8c757b tempest-ServersAdminTestJSON-1328224167 tempest-ServersAdminTestJSON-1328224167-project-admin] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] network_info to inject: |[{"id": "cc183679-2e0d-4d97-9429-82606794bea3", "address": "fa:16:3e:6d:c1:68", "network": {"id": "bc368623-cfb2-43fb-a7d6-8dd238bd961d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1360202280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d7394d965f94155a34dd0ecc0957649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": 
"nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc183679-2e", "ovs_interfaceid": "cc183679-2e0d-4d97-9429-82606794bea3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 973.629826] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-35d37338-0cdc-4cfa-a7c9-76b20f8c757b tempest-ServersAdminTestJSON-1328224167 tempest-ServersAdminTestJSON-1328224167-project-admin] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Reconfiguring VM instance to set the machine id {{(pid=61978) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 973.631201] env[61978]: DEBUG nova.compute.manager [None req-fff405bf-8666-4cb2-977c-3d17a85b74f3 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Image not found during clean up 7e38f3f1-4b65-4030-8b1a-af8f828b3d96 {{(pid=61978) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4500}} [ 973.632430] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ab5873b-33da-4594-96a3-62875d3e8d9f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.661383] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ecf113-c644-929b-7816-53e448efa174, 'name': SearchDatastore_Task, 'duration_secs': 0.016656} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.662989] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.663336] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] bdfdd685-e440-4f53-b6c4-2ee2f06acba8/bdfdd685-e440-4f53-b6c4-2ee2f06acba8.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 973.663682] env[61978]: DEBUG oslo_vmware.api [None req-35d37338-0cdc-4cfa-a7c9-76b20f8c757b tempest-ServersAdminTestJSON-1328224167 tempest-ServersAdminTestJSON-1328224167-project-admin] Waiting for the task: (returnval){ [ 973.663682] env[61978]: value = "task-1394935" [ 973.663682] env[61978]: _type = "Task" [ 973.663682] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.663879] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0e7c272c-968f-4654-8a0a-ee196aaa58f7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.680712] env[61978]: DEBUG oslo_vmware.api [None req-35d37338-0cdc-4cfa-a7c9-76b20f8c757b tempest-ServersAdminTestJSON-1328224167 tempest-ServersAdminTestJSON-1328224167-project-admin] Task: {'id': task-1394935, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.681158] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 973.681158] env[61978]: value = "task-1394936" [ 973.681158] env[61978]: _type = "Task" [ 973.681158] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.692743] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394936, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.965688] env[61978]: DEBUG oslo_concurrency.lockutils [None req-890b2cc6-62b2-4717-a0dd-132ba3cecc2d tempest-ServersAdminNegativeTestJSON-798110787 tempest-ServersAdminNegativeTestJSON-798110787-project-member] Lock "e9e2deb5-5bf9-4b57-832f-9928d3cda162" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.105s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.979800] env[61978]: DEBUG oslo_concurrency.lockutils [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Acquiring lock "e30d4a9f-1d75-453c-9552-2a0fbd4aa87d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.980187] env[61978]: DEBUG oslo_concurrency.lockutils [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Lock "e30d4a9f-1d75-453c-9552-2a0fbd4aa87d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.981856] env[61978]: DEBUG oslo_concurrency.lockutils [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Acquiring lock "e30d4a9f-1d75-453c-9552-2a0fbd4aa87d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.982181] env[61978]: DEBUG oslo_concurrency.lockutils [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e 
tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Lock "e30d4a9f-1d75-453c-9552-2a0fbd4aa87d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.002s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.983081] env[61978]: DEBUG oslo_concurrency.lockutils [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Lock "e30d4a9f-1d75-453c-9552-2a0fbd4aa87d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.990350] env[61978]: INFO nova.compute.manager [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Terminating instance [ 973.995089] env[61978]: DEBUG nova.compute.manager [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 973.995440] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 973.996113] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c20fc272-d49a-4b6b-a32d-48d0c169d63c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.011019] env[61978]: DEBUG oslo_vmware.api [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Waiting for the task: (returnval){ [ 974.011019] env[61978]: value = "task-1394937" [ 974.011019] env[61978]: _type = "Task" [ 974.011019] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.021031] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394933, 'name': Rename_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.031145] env[61978]: DEBUG oslo_vmware.api [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394937, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.095035] env[61978]: DEBUG oslo_vmware.api [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1394934, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.30904} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.096475] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 974.096693] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 974.096919] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 974.097112] env[61978]: INFO nova.compute.manager [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Took 0.66 seconds to destroy the instance on the hypervisor. [ 974.097406] env[61978]: DEBUG oslo.service.loopingcall [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 974.098661] env[61978]: DEBUG nova.compute.manager [req-3b7e6eb1-4224-4e61-896e-dbb72b13c9fe req-8dfc7239-4f69-46f5-9642-a6f87eaf7ff7 service nova] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Received event network-vif-plugged-4c3e3550-3780-4cf8-b191-9a82b2f340f2 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 974.098887] env[61978]: DEBUG oslo_concurrency.lockutils [req-3b7e6eb1-4224-4e61-896e-dbb72b13c9fe req-8dfc7239-4f69-46f5-9642-a6f87eaf7ff7 service nova] Acquiring lock "bdfdd685-e440-4f53-b6c4-2ee2f06acba8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.099136] env[61978]: DEBUG oslo_concurrency.lockutils [req-3b7e6eb1-4224-4e61-896e-dbb72b13c9fe req-8dfc7239-4f69-46f5-9642-a6f87eaf7ff7 service nova] Lock "bdfdd685-e440-4f53-b6c4-2ee2f06acba8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.099341] env[61978]: DEBUG oslo_concurrency.lockutils [req-3b7e6eb1-4224-4e61-896e-dbb72b13c9fe req-8dfc7239-4f69-46f5-9642-a6f87eaf7ff7 service nova] Lock "bdfdd685-e440-4f53-b6c4-2ee2f06acba8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.099524] env[61978]: DEBUG nova.compute.manager [req-3b7e6eb1-4224-4e61-896e-dbb72b13c9fe req-8dfc7239-4f69-46f5-9642-a6f87eaf7ff7 service nova] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] No waiting events found dispatching network-vif-plugged-4c3e3550-3780-4cf8-b191-9a82b2f340f2 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 974.100127] env[61978]: WARNING nova.compute.manager [req-3b7e6eb1-4224-4e61-896e-dbb72b13c9fe req-8dfc7239-4f69-46f5-9642-a6f87eaf7ff7 service nova] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Received unexpected event network-vif-plugged-4c3e3550-3780-4cf8-b191-9a82b2f340f2 for instance with vm_state building and task_state spawning. [ 974.100363] env[61978]: DEBUG nova.compute.manager [req-3b7e6eb1-4224-4e61-896e-dbb72b13c9fe req-8dfc7239-4f69-46f5-9642-a6f87eaf7ff7 service nova] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Received event network-changed-4c3e3550-3780-4cf8-b191-9a82b2f340f2 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 974.100572] env[61978]: DEBUG nova.compute.manager [req-3b7e6eb1-4224-4e61-896e-dbb72b13c9fe req-8dfc7239-4f69-46f5-9642-a6f87eaf7ff7 service nova] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Refreshing instance network info cache due to event network-changed-4c3e3550-3780-4cf8-b191-9a82b2f340f2. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 974.100877] env[61978]: DEBUG oslo_concurrency.lockutils [req-3b7e6eb1-4224-4e61-896e-dbb72b13c9fe req-8dfc7239-4f69-46f5-9642-a6f87eaf7ff7 service nova] Acquiring lock "refresh_cache-bdfdd685-e440-4f53-b6c4-2ee2f06acba8" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.101040] env[61978]: DEBUG oslo_concurrency.lockutils [req-3b7e6eb1-4224-4e61-896e-dbb72b13c9fe req-8dfc7239-4f69-46f5-9642-a6f87eaf7ff7 service nova] Acquired lock "refresh_cache-bdfdd685-e440-4f53-b6c4-2ee2f06acba8" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.101238] env[61978]: DEBUG nova.network.neutron [req-3b7e6eb1-4224-4e61-896e-dbb72b13c9fe req-8dfc7239-4f69-46f5-9642-a6f87eaf7ff7 service nova] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Refreshing network info cache for port 4c3e3550-3780-4cf8-b191-9a82b2f340f2 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 974.102784] env[61978]: DEBUG nova.compute.manager [-] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 974.102911] env[61978]: DEBUG nova.network.neutron [-] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 974.180359] env[61978]: DEBUG oslo_vmware.api [None req-35d37338-0cdc-4cfa-a7c9-76b20f8c757b tempest-ServersAdminTestJSON-1328224167 tempest-ServersAdminTestJSON-1328224167-project-admin] Task: {'id': task-1394935, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.193106] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394936, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.328469] env[61978]: DEBUG oslo_vmware.rw_handles [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a550c3-05aa-317d-e5d8-cebc30d028bb/disk-0.vmdk. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 974.333058] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce12afe7-fca8-4111-b7bb-3031724d2ab3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.338035] env[61978]: DEBUG oslo_vmware.rw_handles [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a550c3-05aa-317d-e5d8-cebc30d028bb/disk-0.vmdk is in state: ready. 
{{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 974.338235] env[61978]: ERROR oslo_vmware.rw_handles [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a550c3-05aa-317d-e5d8-cebc30d028bb/disk-0.vmdk due to incomplete transfer. [ 974.338473] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a5e7d0cf-1380-448e-be32-66166cec6f9b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.347822] env[61978]: DEBUG oslo_vmware.rw_handles [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a550c3-05aa-317d-e5d8-cebc30d028bb/disk-0.vmdk. {{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 974.348052] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Uploaded image e2653038-5e10-426c-af6a-90f08b37f43d to the Glance image server {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 974.350037] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Destroying the VM {{(pid=61978) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 974.350037] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2b2bdf52-e5ff-4a5f-8f44-fc0a25f0f8f7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.356764] env[61978]: DEBUG oslo_vmware.api [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for the task: (returnval){ [ 974.356764] env[61978]: value = "task-1394938" [ 974.356764] env[61978]: _type = "Task" [ 974.356764] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.369754] env[61978]: DEBUG oslo_vmware.api [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394938, 'name': Destroy_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.517727] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394933, 'name': Rename_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.524065] env[61978]: DEBUG oslo_vmware.api [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394937, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.687435] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "0d48ae5d-7cc8-42b3-a993-44636e9cb171" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.687887] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "0d48ae5d-7cc8-42b3-a993-44636e9cb171" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.688798] env[61978]: DEBUG oslo_vmware.api [None req-35d37338-0cdc-4cfa-a7c9-76b20f8c757b tempest-ServersAdminTestJSON-1328224167 tempest-ServersAdminTestJSON-1328224167-project-admin] Task: {'id': task-1394935, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.700997] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394936, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.703363} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.701453] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] bdfdd685-e440-4f53-b6c4-2ee2f06acba8/bdfdd685-e440-4f53-b6c4-2ee2f06acba8.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 974.701715] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 974.702365] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ea2d706a-2c4a-4d87-9704-85bc417c89d0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.710377] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 974.710377] env[61978]: value = "task-1394939" [ 974.710377] env[61978]: _type = "Task" [ 974.710377] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.722125] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394939, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.882870] env[61978]: DEBUG oslo_vmware.api [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394938, 'name': Destroy_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.024276] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394933, 'name': Rename_Task, 'duration_secs': 1.304276} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.028068] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 975.031836] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f7db056-0722-48bb-8a5e-d9447c5344e3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.035802] env[61978]: DEBUG oslo_vmware.api [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394937, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.037352] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 975.037352] env[61978]: value = "task-1394940" [ 975.037352] env[61978]: _type = "Task" [ 975.037352] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.058453] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394940, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.064151] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-022352cd-14bf-4c7f-ac1c-d12b509e9d30 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.073991] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c99a33-46bc-48c8-bae9-d46f99b3bd0b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.113010] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a28e133-201e-44c6-a229-9e94ff93f105 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.127092] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef53705c-f8ee-4d03-b0a2-0c353074222a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.142038] env[61978]: DEBUG nova.compute.provider_tree [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.177188] env[61978]: DEBUG oslo_vmware.api [None req-35d37338-0cdc-4cfa-a7c9-76b20f8c757b tempest-ServersAdminTestJSON-1328224167 tempest-ServersAdminTestJSON-1328224167-project-admin] Task: {'id': task-1394935, 'name': ReconfigVM_Task, 'duration_secs': 1.266552} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.177568] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-35d37338-0cdc-4cfa-a7c9-76b20f8c757b tempest-ServersAdminTestJSON-1328224167 tempest-ServersAdminTestJSON-1328224167-project-admin] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Reconfigured VM instance to set the machine id {{(pid=61978) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 975.222250] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394939, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086022} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.222641] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 975.223439] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe5c161-748d-43ac-8701-dc87a9c70982 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.247220] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] bdfdd685-e440-4f53-b6c4-2ee2f06acba8/bdfdd685-e440-4f53-b6c4-2ee2f06acba8.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 975.248209] env[61978]: DEBUG nova.network.neutron [req-3b7e6eb1-4224-4e61-896e-dbb72b13c9fe req-8dfc7239-4f69-46f5-9642-a6f87eaf7ff7 service nova] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Updated VIF entry in instance network info cache for port 4c3e3550-3780-4cf8-b191-9a82b2f340f2. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 975.248511] env[61978]: DEBUG nova.network.neutron [req-3b7e6eb1-4224-4e61-896e-dbb72b13c9fe req-8dfc7239-4f69-46f5-9642-a6f87eaf7ff7 service nova] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Updating instance_info_cache with network_info: [{"id": "4c3e3550-3780-4cf8-b191-9a82b2f340f2", "address": "fa:16:3e:4f:d0:71", "network": {"id": "3e60c5f0-d590-4a22-a8bf-c0356ac4deb4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1865077723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86f4ae0b29af4ee2b33e5a499cf1e899", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c3e3550-37", "ovs_interfaceid": "4c3e3550-3780-4cf8-b191-9a82b2f340f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.249704] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbd63af7-7acb-435f-81c7-2c4d7757c1f7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.273364] env[61978]: DEBUG oslo_vmware.api [None 
req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 975.273364] env[61978]: value = "task-1394941" [ 975.273364] env[61978]: _type = "Task" [ 975.273364] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.283065] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394941, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.372023] env[61978]: DEBUG nova.network.neutron [-] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.373560] env[61978]: DEBUG oslo_vmware.api [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394938, 'name': Destroy_Task, 'duration_secs': 0.763841} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.373941] env[61978]: DEBUG nova.network.neutron [-] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.375321] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Destroyed the VM [ 975.375853] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Deleting Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 975.376606] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4d93e39e-b225-4110-9e2d-c3ba445cf6c6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.386322] env[61978]: DEBUG oslo_vmware.api [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for the task: (returnval){ [ 975.386322] env[61978]: value = "task-1394942" [ 975.386322] env[61978]: _type = "Task" [ 975.386322] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.400077] env[61978]: DEBUG oslo_vmware.api [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394942, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.439934] env[61978]: DEBUG nova.objects.instance [None req-d899934e-238b-4676-8922-a3b5ff854fcd tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lazy-loading 'flavor' on Instance uuid b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 975.533072] env[61978]: DEBUG oslo_vmware.api [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394937, 'name': PowerOffVM_Task, 'duration_secs': 1.154698} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.533686] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 975.534132] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Volume detach. Driver type: vmdk {{(pid=61978) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 975.534399] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295814', 'volume_id': '8895fd52-1e47-4be8-bc86-5cb974e51fe1', 'name': 'volume-8895fd52-1e47-4be8-bc86-5cb974e51fe1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e30d4a9f-1d75-453c-9552-2a0fbd4aa87d', 'attached_at': '', 'detached_at': '', 'volume_id': '8895fd52-1e47-4be8-bc86-5cb974e51fe1', 'serial': '8895fd52-1e47-4be8-bc86-5cb974e51fe1'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 975.535684] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8b6f3a-4153-4c40-908b-3c38ff828342 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.549367] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394940, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.566202] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a068360-737a-481c-8b7c-484d05259b3b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.574694] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d551c88e-1381-4c72-8a93-ed2c57219e8f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.600101] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf9b5c6-6c49-4ed8-9051-950403ca831f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.618899] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] The volume has not been displaced from its original location: [datastore2] volume-8895fd52-1e47-4be8-bc86-5cb974e51fe1/volume-8895fd52-1e47-4be8-bc86-5cb974e51fe1.vmdk. No consolidation needed. {{(pid=61978) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 975.625803] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Reconfiguring VM instance instance-0000001b to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 975.625803] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46af5b5f-c18b-4678-a895-d4b6c72c3372 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.646896] env[61978]: DEBUG nova.scheduler.client.report [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 975.659182] env[61978]: DEBUG oslo_vmware.api [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Waiting for the task: (returnval){ [ 975.659182] env[61978]: value = "task-1394943" [ 975.659182] env[61978]: _type = "Task" [ 975.659182] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.672746] env[61978]: DEBUG oslo_vmware.api [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394943, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.765738] env[61978]: DEBUG oslo_concurrency.lockutils [req-3b7e6eb1-4224-4e61-896e-dbb72b13c9fe req-8dfc7239-4f69-46f5-9642-a6f87eaf7ff7 service nova] Releasing lock "refresh_cache-bdfdd685-e440-4f53-b6c4-2ee2f06acba8" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.791816] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394941, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.878662] env[61978]: INFO nova.compute.manager [-] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Took 2.40 seconds to deallocate network for instance. [ 975.879210] env[61978]: INFO nova.compute.manager [-] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Took 1.78 seconds to deallocate network for instance. [ 975.909249] env[61978]: DEBUG oslo_vmware.api [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394942, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.930037] env[61978]: DEBUG oslo_concurrency.lockutils [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Acquiring lock "ff793464-9bef-449f-8485-36d3b8fb1d69" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.930392] env[61978]: DEBUG oslo_concurrency.lockutils [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Lock "ff793464-9bef-449f-8485-36d3b8fb1d69" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.930683] env[61978]: DEBUG oslo_concurrency.lockutils [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Acquiring lock "ff793464-9bef-449f-8485-36d3b8fb1d69-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.930807] env[61978]: DEBUG oslo_concurrency.lockutils [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Lock "ff793464-9bef-449f-8485-36d3b8fb1d69-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.931076] env[61978]: DEBUG oslo_concurrency.lockutils [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Lock "ff793464-9bef-449f-8485-36d3b8fb1d69-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.935877] env[61978]: INFO nova.compute.manager [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Terminating instance [ 975.941400] env[61978]: DEBUG nova.compute.manager [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 975.941400] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 975.941400] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa81879d-8e5a-49a0-86b1-dfe34b88d41c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.948940] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d899934e-238b-4676-8922-a3b5ff854fcd tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "refresh_cache-b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.949192] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d899934e-238b-4676-8922-a3b5ff854fcd tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquired lock "refresh_cache-b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.949414] env[61978]: DEBUG nova.network.neutron [None req-d899934e-238b-4676-8922-a3b5ff854fcd tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 975.949640] env[61978]: DEBUG nova.objects.instance [None req-d899934e-238b-4676-8922-a3b5ff854fcd tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lazy-loading 'info_cache' on Instance uuid b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 975.959579] env[61978]: DEBUG nova.virt.vmwareapi.vm_util 
[None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 975.960060] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a821ea5-35da-46b1-9023-a6fefd6c3022 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.973462] env[61978]: DEBUG oslo_vmware.api [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Waiting for the task: (returnval){ [ 975.973462] env[61978]: value = "task-1394944" [ 975.973462] env[61978]: _type = "Task" [ 975.973462] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.986321] env[61978]: DEBUG oslo_vmware.api [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': task-1394944, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.049812] env[61978]: DEBUG oslo_vmware.api [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394940, 'name': PowerOnVM_Task, 'duration_secs': 0.555563} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.050113] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 976.050329] env[61978]: INFO nova.compute.manager [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Took 11.15 seconds to spawn the instance on the hypervisor. 
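
The recurring "Waiting for the task: (returnval){ ... }" and "Task: {...} progress is N%" entries in this stretch of the log (ReconfigVM_Task, PowerOnVM_Task, RemoveSnapshot_Task, DeleteDatastoreFile_Task, and so on) all come from the same pattern: each vCenter operation returns a task reference, and the caller polls that task until it reaches a terminal state, logging progress along the way. The sketch below is a simplified illustration of that polling loop under stated assumptions, not the oslo.vmware implementation: session.get_task_info is a hypothetical helper standing in for the PropertyCollector.RetrievePropertiesEx reads visible in the log, and POLL_INTERVAL is an assumed value.

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; assumed value for this sketch

    def wait_for_task(session, task_ref):
        """Poll a vCenter task until it finishes; return its result or raise."""
        while True:
            # Read the TaskInfo of the task managed object. In the log this
            # corresponds to the PropertyCollector calls that precede each
            # "progress is N%" message.
            info = session.get_task_info(task_ref)  # hypothetical helper
            if info.state == "success":
                return info.result
            if info.state == "error":
                raise RuntimeError(info.error.localizedMessage)
            # Still 'queued' or 'running': report progress and poll again.
            print(f"Task {info.key} ({info.name}) progress is {info.progress or 0}%")
            time.sleep(POLL_INTERVAL)
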
[ 976.050514] env[61978]: DEBUG nova.compute.manager [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 976.051318] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d554c9d2-035f-46f2-a581-ead8688a9b64 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.155152] env[61978]: DEBUG oslo_concurrency.lockutils [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.739s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.155894] env[61978]: DEBUG nova.compute.manager [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 976.159083] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.172s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.161269] env[61978]: INFO nova.compute.claims [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 976.175735] env[61978]: DEBUG oslo_vmware.api [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394943, 'name': ReconfigVM_Task, 'duration_secs': 0.234921} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.175735] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Reconfigured VM instance instance-0000001b to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 976.180292] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7649072-5e42-4d4d-93fb-1cb054383ade {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.199098] env[61978]: DEBUG oslo_vmware.api [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Waiting for the task: (returnval){ [ 976.199098] env[61978]: value = "task-1394945" [ 976.199098] env[61978]: _type = "Task" [ 976.199098] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.213382] env[61978]: DEBUG nova.compute.manager [req-5ffdf454-51b9-4a8f-9b0a-39af9e53b5be req-acb37593-1f37-4952-816b-4aee2cd47dee service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Received event network-changed-b04a501f-29a8-442a-9f2c-dddd76f5e335 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 976.213503] env[61978]: DEBUG nova.compute.manager [req-5ffdf454-51b9-4a8f-9b0a-39af9e53b5be req-acb37593-1f37-4952-816b-4aee2cd47dee service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Refreshing instance network info cache due to event network-changed-b04a501f-29a8-442a-9f2c-dddd76f5e335. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 976.213766] env[61978]: DEBUG oslo_concurrency.lockutils [req-5ffdf454-51b9-4a8f-9b0a-39af9e53b5be req-acb37593-1f37-4952-816b-4aee2cd47dee service nova] Acquiring lock "refresh_cache-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 976.213912] env[61978]: DEBUG oslo_concurrency.lockutils [req-5ffdf454-51b9-4a8f-9b0a-39af9e53b5be req-acb37593-1f37-4952-816b-4aee2cd47dee service nova] Acquired lock "refresh_cache-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.214155] env[61978]: DEBUG nova.network.neutron [req-5ffdf454-51b9-4a8f-9b0a-39af9e53b5be req-acb37593-1f37-4952-816b-4aee2cd47dee service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Refreshing network info cache for port b04a501f-29a8-442a-9f2c-dddd76f5e335 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 976.219818] env[61978]: DEBUG oslo_vmware.api [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394945, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.290416] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394941, 'name': ReconfigVM_Task, 'duration_secs': 0.554583} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.291401] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Reconfigured VM instance instance-00000028 to attach disk [datastore2] bdfdd685-e440-4f53-b6c4-2ee2f06acba8/bdfdd685-e440-4f53-b6c4-2ee2f06acba8.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 976.292135] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2779e50c-a0da-4f61-aa72-695e4fe898c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.302238] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 976.302238] env[61978]: value = "task-1394946" [ 976.302238] env[61978]: _type = "Task" [ 976.302238] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.316824] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394946, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.394305] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.401477] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.405146] env[61978]: DEBUG oslo_vmware.api [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394942, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.454357] env[61978]: DEBUG nova.objects.base [None req-d899934e-238b-4676-8922-a3b5ff854fcd tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 976.488259] env[61978]: DEBUG oslo_vmware.api [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': task-1394944, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.570051] env[61978]: INFO nova.compute.manager [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Took 42.05 seconds to build instance. [ 976.665790] env[61978]: DEBUG nova.compute.utils [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 976.671984] env[61978]: DEBUG nova.compute.manager [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 976.671984] env[61978]: DEBUG nova.network.neutron [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 976.714526] env[61978]: DEBUG oslo_vmware.api [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394945, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.803298] env[61978]: DEBUG nova.policy [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c5f3e212dd2443369b4c8022efeaebdb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f4ea5d0199a04f959e59a8e134687392', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 976.819906] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394946, 'name': Rename_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.920576] env[61978]: DEBUG oslo_vmware.api [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394942, 'name': RemoveSnapshot_Task, 'duration_secs': 1.395272} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.920862] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Deleted Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 976.921512] env[61978]: INFO nova.compute.manager [None req-12cec99c-f9d6-4ca4-9c0a-cf8052153512 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Took 16.58 seconds to snapshot the instance on the hypervisor. [ 976.988863] env[61978]: DEBUG oslo_vmware.api [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': task-1394944, 'name': PowerOffVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.072558] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55b90149-505d-49e3-a4ae-b129b4ad73f9 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.324s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.170075] env[61978]: DEBUG nova.compute.manager [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 977.212189] env[61978]: DEBUG oslo_vmware.api [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394945, 'name': ReconfigVM_Task, 'duration_secs': 0.752688} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.212582] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295814', 'volume_id': '8895fd52-1e47-4be8-bc86-5cb974e51fe1', 'name': 'volume-8895fd52-1e47-4be8-bc86-5cb974e51fe1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e30d4a9f-1d75-453c-9552-2a0fbd4aa87d', 'attached_at': '', 'detached_at': '', 'volume_id': '8895fd52-1e47-4be8-bc86-5cb974e51fe1', 'serial': '8895fd52-1e47-4be8-bc86-5cb974e51fe1'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 977.213278] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 977.213523] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085dbc25-2dc3-4635-8f8b-4593e0d574bc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.222138] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 977.224716] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-49a081b9-3252-4a4b-8f73-8782c8b103ab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.316573] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 977.320652] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 977.320987] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Deleting the datastore file [datastore2] e30d4a9f-1d75-453c-9552-2a0fbd4aa87d {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 977.324799] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11e25bf7-9154-4913-8881-d9c0d1ed506d {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.326765] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394946, 'name': Rename_Task, 'duration_secs': 0.690487} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.327209] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 977.327913] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-db5c81a0-1f4c-4110-b2a8-155eedf29b49 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.333296] env[61978]: DEBUG oslo_vmware.api [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Waiting for the task: (returnval){ [ 977.333296] env[61978]: value = "task-1394948" [ 977.333296] env[61978]: _type = "Task" [ 977.333296] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.337892] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 977.337892] env[61978]: value = "task-1394949" [ 977.337892] env[61978]: _type = "Task" [ 977.337892] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.344065] env[61978]: DEBUG oslo_vmware.api [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394948, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.352278] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394949, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.410883] env[61978]: INFO nova.compute.manager [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Rebuilding instance [ 977.470353] env[61978]: DEBUG nova.compute.manager [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 977.471383] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7184812e-ab9a-48d4-a01b-404966887654 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.493070] env[61978]: DEBUG oslo_vmware.api [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': task-1394944, 'name': PowerOffVM_Task, 'duration_secs': 1.0453} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.493466] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 977.493623] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 977.493959] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-399ebaf9-45e4-4b66-9889-9e546a253a32 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.508128] env[61978]: DEBUG nova.network.neutron [req-5ffdf454-51b9-4a8f-9b0a-39af9e53b5be req-acb37593-1f37-4952-816b-4aee2cd47dee service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Updated VIF entry in instance network info cache for port b04a501f-29a8-442a-9f2c-dddd76f5e335. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 977.510012] env[61978]: DEBUG nova.network.neutron [req-5ffdf454-51b9-4a8f-9b0a-39af9e53b5be req-acb37593-1f37-4952-816b-4aee2cd47dee service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Updating instance_info_cache with network_info: [{"id": "b04a501f-29a8-442a-9f2c-dddd76f5e335", "address": "fa:16:3e:62:14:fb", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb04a501f-29", "ovs_interfaceid": "b04a501f-29a8-442a-9f2c-dddd76f5e335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.575805] env[61978]: DEBUG nova.compute.manager [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 977.580057] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 977.580611] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 977.580611] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Deleting the datastore file [datastore2] ff793464-9bef-449f-8485-36d3b8fb1d69 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 977.581108] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d83ae5d-b0a4-469f-a022-d8c5f913e909 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.590511] env[61978]: DEBUG oslo_vmware.api [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Waiting for the task: (returnval){ [ 977.590511] env[61978]: value = "task-1394951" [ 977.590511] env[61978]: _type = "Task" [ 977.590511] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.607498] env[61978]: DEBUG oslo_vmware.api [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': task-1394951, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.699209] env[61978]: DEBUG nova.network.neutron [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Successfully created port: 631d15db-8176-407e-8ab9-1b7e5a095d9a {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 977.735157] env[61978]: DEBUG nova.network.neutron [None req-d899934e-238b-4676-8922-a3b5ff854fcd tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Updating instance_info_cache with network_info: [{"id": "5ad44f35-6aec-4586-a2e9-9f486fa4fd57", "address": "fa:16:3e:b6:da:f2", "network": {"id": "2f12a4fc-0daf-4101-919d-9b9372dcbb2f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1016739189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27ccd1f7b852490a8d92e2c0e714e7d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ad44f35-6a", "ovs_interfaceid": "5ad44f35-6aec-4586-a2e9-9f486fa4fd57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.753135] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668618f4-c911-4d28-9b55-e47968f40af2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.764515] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc196e4-3a04-4021-8076-b5b13aa70dec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.800653] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c17de8-1a45-4d22-a39b-01969c3dabe8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.810290] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf4ea2d-4f2c-42ca-b3e1-8cc6969d7efa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.826427] env[61978]: DEBUG nova.compute.provider_tree [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
977.849359] env[61978]: DEBUG oslo_vmware.api [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Task: {'id': task-1394948, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135661} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.850255] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 977.850626] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 977.850955] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 977.851335] env[61978]: INFO nova.compute.manager [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Took 3.86 seconds to destroy the instance on the hypervisor. [ 977.851743] env[61978]: DEBUG oslo.service.loopingcall [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 977.852374] env[61978]: DEBUG nova.compute.manager [-] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 977.852536] env[61978]: DEBUG nova.network.neutron [-] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 977.857987] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394949, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.994956] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 977.995356] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a7f4b705-de10-4618-abec-5aa6e23019d9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.006825] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 978.006825] env[61978]: value = "task-1394952" [ 978.006825] env[61978]: _type = "Task" [ 978.006825] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.011945] env[61978]: DEBUG oslo_concurrency.lockutils [req-5ffdf454-51b9-4a8f-9b0a-39af9e53b5be req-acb37593-1f37-4952-816b-4aee2cd47dee service nova] Releasing lock "refresh_cache-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 978.012115] env[61978]: DEBUG nova.compute.manager [req-5ffdf454-51b9-4a8f-9b0a-39af9e53b5be req-acb37593-1f37-4952-816b-4aee2cd47dee service nova] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Received event network-vif-deleted-1d9ac1c2-acc1-4cef-8a1e-445797f69a52 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 978.012298] env[61978]: DEBUG nova.compute.manager [req-5ffdf454-51b9-4a8f-9b0a-39af9e53b5be req-acb37593-1f37-4952-816b-4aee2cd47dee service nova] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Received event network-vif-deleted-7851dad1-d943-463b-82c3-1a83ddc35c79 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 978.016270] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394952, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.105415] env[61978]: DEBUG oslo_vmware.api [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Task: {'id': task-1394951, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.205272} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.105793] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 978.106135] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 978.106347] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 978.106570] env[61978]: INFO nova.compute.manager [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Took 2.17 seconds to destroy the instance on the hypervisor. [ 978.106942] env[61978]: DEBUG oslo.service.loopingcall [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 978.108071] env[61978]: DEBUG oslo_concurrency.lockutils [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.108863] env[61978]: DEBUG nova.compute.manager [-] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 978.108863] env[61978]: DEBUG nova.network.neutron [-] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 978.187316] env[61978]: DEBUG nova.compute.manager [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 978.217726] env[61978]: DEBUG nova.virt.hardware [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 978.218033] env[61978]: DEBUG nova.virt.hardware [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 978.219013] env[61978]: DEBUG nova.virt.hardware [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 978.219013] env[61978]: DEBUG nova.virt.hardware [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 978.219013] env[61978]: DEBUG nova.virt.hardware [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 978.219013] env[61978]: DEBUG nova.virt.hardware [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 978.219013] env[61978]: DEBUG nova.virt.hardware [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 978.219223] env[61978]: DEBUG nova.virt.hardware [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 978.219223] 
env[61978]: DEBUG nova.virt.hardware [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 978.219309] env[61978]: DEBUG nova.virt.hardware [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 978.219482] env[61978]: DEBUG nova.virt.hardware [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 978.220423] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05be4b4e-7145-424e-856a-d8d1cec93c64 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.233058] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8df3a77-789f-4e56-86d0-e721acb72965 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.239932] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d899934e-238b-4676-8922-a3b5ff854fcd tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Releasing lock "refresh_cache-b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 978.329456] env[61978]: DEBUG nova.scheduler.client.report [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 978.355747] env[61978]: DEBUG oslo_vmware.api [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394949, 'name': PowerOnVM_Task, 'duration_secs': 0.532508} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.356823] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 978.356967] env[61978]: INFO nova.compute.manager [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Took 10.55 seconds to spawn the instance on the hypervisor. [ 978.357400] env[61978]: DEBUG nova.compute.manager [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 978.357997] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99faede8-d919-43f9-9ae0-45de2098a8c0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.516975] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394952, 'name': PowerOffVM_Task, 'duration_secs': 0.272695} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.519500] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 978.519745] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 978.521283] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7333f47b-3c6b-42e8-a6ce-8f564f7dfc16 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.533450] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 978.533723] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-207c92b1-e8b5-4911-a8cb-02def3ddf3de {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.633629] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 
tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 978.633629] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 978.633629] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Deleting the datastore file [datastore1] 92eb5edb-803b-48d4-8c4f-338d7c3b3d13 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 978.633629] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0be50ae6-3de4-4d00-969a-84ad93595ea3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.641209] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 978.641209] env[61978]: value = "task-1394954" [ 978.641209] env[61978]: _type = "Task" [ 978.641209] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.652858] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394954, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.753251] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d899934e-238b-4676-8922-a3b5ff854fcd tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 978.753570] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a114971-829e-4d37-8f7c-a7863aee5cd9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.761771] env[61978]: DEBUG oslo_vmware.api [None req-d899934e-238b-4676-8922-a3b5ff854fcd tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 978.761771] env[61978]: value = "task-1394955" [ 978.761771] env[61978]: _type = "Task" [ 978.761771] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.771270] env[61978]: DEBUG oslo_vmware.api [None req-d899934e-238b-4676-8922-a3b5ff854fcd tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394955, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.834531] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.676s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.835226] env[61978]: DEBUG nova.compute.manager [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 978.838148] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.527s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.839631] env[61978]: INFO nova.compute.claims [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 978.888090] env[61978]: INFO nova.compute.manager [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Took 41.80 seconds to build instance. [ 979.058804] env[61978]: DEBUG nova.compute.manager [req-3171b3d6-f03d-4d50-b3a6-7422b7e9778b req-0e1da0b7-6b2a-459f-bba4-832b999206c7 service nova] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Received event network-vif-deleted-9b850a07-34a8-4e1d-afff-7650895b0238 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 979.058804] env[61978]: INFO nova.compute.manager [req-3171b3d6-f03d-4d50-b3a6-7422b7e9778b req-0e1da0b7-6b2a-459f-bba4-832b999206c7 service nova] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Neutron deleted interface 9b850a07-34a8-4e1d-afff-7650895b0238; detaching it from the instance and deleting it from the info cache [ 979.058804] env[61978]: DEBUG nova.network.neutron [req-3171b3d6-f03d-4d50-b3a6-7422b7e9778b req-0e1da0b7-6b2a-459f-bba4-832b999206c7 service nova] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.068335] env[61978]: DEBUG nova.network.neutron [-] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.158461] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394954, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.335001} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.158924] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 979.159290] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 979.159556] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 979.276391] env[61978]: DEBUG oslo_vmware.api [None req-d899934e-238b-4676-8922-a3b5ff854fcd tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394955, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.344628] env[61978]: DEBUG nova.compute.utils [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 979.349187] env[61978]: DEBUG nova.compute.manager [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 979.349362] env[61978]: DEBUG nova.network.neutron [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 979.390659] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dfae6df9-68eb-428a-9e88-bd8a071d4f54 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "bdfdd685-e440-4f53-b6c4-2ee2f06acba8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 59.643s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.409330] env[61978]: DEBUG nova.policy [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a208cee3d9c4efb8240ad943b55e915', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86ad52b551104a2594f1dbbc287f9efa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 979.506230] env[61978]: DEBUG nova.network.neutron [-] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.573184] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ccb5ba15-e0cd-44f3-890e-a7b51efe2d59 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.576150] env[61978]: INFO nova.compute.manager [-] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Took 1.72 seconds to deallocate network for instance. [ 979.589753] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829428a1-c0fa-4aad-b715-2b9209c4fa6c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.628850] env[61978]: DEBUG nova.compute.manager [req-3171b3d6-f03d-4d50-b3a6-7422b7e9778b req-0e1da0b7-6b2a-459f-bba4-832b999206c7 service nova] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Detach interface failed, port_id=9b850a07-34a8-4e1d-afff-7650895b0238, reason: Instance e30d4a9f-1d75-453c-9552-2a0fbd4aa87d could not be found. 
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 979.768311] env[61978]: INFO nova.compute.manager [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Rescuing [ 979.772329] env[61978]: DEBUG oslo_concurrency.lockutils [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "refresh_cache-bdfdd685-e440-4f53-b6c4-2ee2f06acba8" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.772329] env[61978]: DEBUG oslo_concurrency.lockutils [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquired lock "refresh_cache-bdfdd685-e440-4f53-b6c4-2ee2f06acba8" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.772329] env[61978]: DEBUG nova.network.neutron [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 979.780313] env[61978]: DEBUG oslo_vmware.api [None req-d899934e-238b-4676-8922-a3b5ff854fcd tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394955, 'name': PowerOnVM_Task, 'duration_secs': 0.55482} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.780583] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d899934e-238b-4676-8922-a3b5ff854fcd tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 979.782215] env[61978]: DEBUG nova.compute.manager [None req-d899934e-238b-4676-8922-a3b5ff854fcd tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 979.783676] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb73630-9e2e-4f23-b7c5-47ebd7dbb7c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.855195] env[61978]: DEBUG nova.compute.manager [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 979.895752] env[61978]: DEBUG nova.compute.manager [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 979.921184] env[61978]: DEBUG nova.network.neutron [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Successfully created port: 7417d7e9-723d-408d-bfa4-e583af757e79 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 980.010468] env[61978]: INFO nova.compute.manager [-] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Took 1.90 seconds to deallocate network for instance. [ 980.151381] env[61978]: INFO nova.compute.manager [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Took 0.57 seconds to detach 1 volumes for instance. [ 980.156099] env[61978]: DEBUG nova.compute.manager [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Deleting volume: 8895fd52-1e47-4be8-bc86-5cb974e51fe1 {{(pid=61978) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3247}} [ 980.211296] env[61978]: DEBUG nova.virt.hardware [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 980.211578] env[61978]: DEBUG nova.virt.hardware [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 980.211696] env[61978]: DEBUG nova.virt.hardware [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 980.211869] env[61978]: DEBUG nova.virt.hardware [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 980.212033] env[61978]: DEBUG nova.virt.hardware [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Image pref 0:0:0 {{(pid=61978) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 980.212193] env[61978]: DEBUG nova.virt.hardware [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 980.212396] env[61978]: DEBUG nova.virt.hardware [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 980.212886] env[61978]: DEBUG nova.virt.hardware [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 980.212886] env[61978]: DEBUG nova.virt.hardware [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 980.213051] env[61978]: DEBUG nova.virt.hardware [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 980.213240] env[61978]: DEBUG nova.virt.hardware [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 980.214131] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6148a72c-c3e7-46ef-8a3a-ac85c5bc41d8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.228500] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29488136-57ca-4234-8d20-d16d765fb9f2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.245269] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:56:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e4c8c8fd-baca-4e60-97dc-ff0418d63215', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15ee1476-11da-4794-a070-c4365a572948', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 980.253326] env[61978]: DEBUG oslo.service.loopingcall [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 
tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 980.257095] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 980.257586] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f3e17c1-b75e-4344-ac5c-d1d981285db0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.284705] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 980.284705] env[61978]: value = "task-1394957" [ 980.284705] env[61978]: _type = "Task" [ 980.284705] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.293990] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394957, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.421065] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.494266] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca951d7e-54a9-43db-bfb0-34bfbb4609e2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.505676] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddecbc80-5230-44fe-a8a7-0b4f796652e5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.518847] env[61978]: DEBUG oslo_concurrency.lockutils [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.555603] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cc45e09-6ccb-4129-a125-d528a6ab8301 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.567964] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff9868b-f00c-4379-a72d-edf9c6918834 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.586687] env[61978]: DEBUG nova.compute.provider_tree [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.588852] env[61978]: DEBUG 
nova.network.neutron [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Successfully updated port: 631d15db-8176-407e-8ab9-1b7e5a095d9a {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 980.707279] env[61978]: DEBUG oslo_concurrency.lockutils [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.713394] env[61978]: DEBUG nova.compute.manager [req-318c72be-12e5-43bb-8cdd-a1fc4d44ae0b req-7de8b1cd-12df-4aab-b846-35671baa1488 service nova] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Received event network-vif-plugged-631d15db-8176-407e-8ab9-1b7e5a095d9a {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 980.713649] env[61978]: DEBUG oslo_concurrency.lockutils [req-318c72be-12e5-43bb-8cdd-a1fc4d44ae0b req-7de8b1cd-12df-4aab-b846-35671baa1488 service nova] Acquiring lock "f1001633-e4e5-4de1-8a6b-cf653e43d821-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.713923] env[61978]: DEBUG oslo_concurrency.lockutils [req-318c72be-12e5-43bb-8cdd-a1fc4d44ae0b req-7de8b1cd-12df-4aab-b846-35671baa1488 service nova] Lock "f1001633-e4e5-4de1-8a6b-cf653e43d821-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.714230] env[61978]: DEBUG oslo_concurrency.lockutils [req-318c72be-12e5-43bb-8cdd-a1fc4d44ae0b req-7de8b1cd-12df-4aab-b846-35671baa1488 service nova] Lock "f1001633-e4e5-4de1-8a6b-cf653e43d821-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.714359] env[61978]: DEBUG nova.compute.manager [req-318c72be-12e5-43bb-8cdd-a1fc4d44ae0b req-7de8b1cd-12df-4aab-b846-35671baa1488 service nova] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] No waiting events found dispatching network-vif-plugged-631d15db-8176-407e-8ab9-1b7e5a095d9a {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 980.714546] env[61978]: WARNING nova.compute.manager [req-318c72be-12e5-43bb-8cdd-a1fc4d44ae0b req-7de8b1cd-12df-4aab-b846-35671baa1488 service nova] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Received unexpected event network-vif-plugged-631d15db-8176-407e-8ab9-1b7e5a095d9a for instance with vm_state building and task_state spawning. [ 980.798105] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394957, 'name': CreateVM_Task, 'duration_secs': 0.478754} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.800956] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 980.801672] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.801837] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.802211] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 980.803108] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a5eb06f-5a2d-4f42-ac07-87e0b3b099d0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.809835] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 980.809835] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5297b7ae-f4cb-b5f5-0cdb-6d7a7b9b34e3" [ 980.809835] env[61978]: _type = "Task" [ 980.809835] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.818327] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5297b7ae-f4cb-b5f5-0cdb-6d7a7b9b34e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.876836] env[61978]: DEBUG nova.compute.manager [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 980.909109] env[61978]: DEBUG nova.virt.hardware [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 980.909370] env[61978]: DEBUG nova.virt.hardware [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 980.909530] env[61978]: DEBUG nova.virt.hardware [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 980.909714] env[61978]: DEBUG nova.virt.hardware [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 980.910241] env[61978]: DEBUG nova.virt.hardware [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 980.910423] env[61978]: DEBUG nova.virt.hardware [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 980.910692] env[61978]: DEBUG nova.virt.hardware [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 980.910889] env[61978]: DEBUG nova.virt.hardware [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 980.911085] env[61978]: DEBUG nova.virt.hardware [None 
req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 980.911258] env[61978]: DEBUG nova.virt.hardware [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 980.911436] env[61978]: DEBUG nova.virt.hardware [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 980.912381] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4511c738-e5ee-4250-856b-a781626a97c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.921593] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d149ea-533e-4691-9886-36f05c9bbce2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.944428] env[61978]: DEBUG nova.network.neutron [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Updating instance_info_cache with network_info: [{"id": "4c3e3550-3780-4cf8-b191-9a82b2f340f2", "address": "fa:16:3e:4f:d0:71", "network": {"id": "3e60c5f0-d590-4a22-a8bf-c0356ac4deb4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1865077723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86f4ae0b29af4ee2b33e5a499cf1e899", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c3e3550-37", "ovs_interfaceid": "4c3e3550-3780-4cf8-b191-9a82b2f340f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.095643] env[61978]: DEBUG nova.scheduler.client.report [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 981.100014] env[61978]: DEBUG oslo_concurrency.lockutils [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Acquiring lock "refresh_cache-f1001633-e4e5-4de1-8a6b-cf653e43d821" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.100163] env[61978]: DEBUG oslo_concurrency.lockutils [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Acquired lock "refresh_cache-f1001633-e4e5-4de1-8a6b-cf653e43d821" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.100336] env[61978]: DEBUG nova.network.neutron [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 981.102173] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Acquiring lock "32bcb974-8db9-43e2-b397-b497f3a4f30c" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.102495] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Lock "32bcb974-8db9-43e2-b397-b497f3a4f30c" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.103066] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Acquiring lock "32bcb974-8db9-43e2-b397-b497f3a4f30c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.103066] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Lock "32bcb974-8db9-43e2-b397-b497f3a4f30c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.103517] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Lock "32bcb974-8db9-43e2-b397-b497f3a4f30c-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.107759] env[61978]: INFO nova.compute.manager [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Terminating instance [ 981.112245] env[61978]: DEBUG nova.compute.manager [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 981.112545] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 981.113680] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f381508-e79d-48da-8cfc-a4db5e843788 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.124603] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 981.124901] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b0dc2ce8-59fe-4b7d-b093-ba1a737b552f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.132423] env[61978]: DEBUG oslo_vmware.api [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Waiting for the task: (returnval){ [ 981.132423] env[61978]: value = "task-1394958" [ 981.132423] env[61978]: _type = "Task" [ 981.132423] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.141694] env[61978]: DEBUG oslo_vmware.api [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Task: {'id': task-1394958, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.333690] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5297b7ae-f4cb-b5f5-0cdb-6d7a7b9b34e3, 'name': SearchDatastore_Task, 'duration_secs': 0.010694} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.333866] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.334056] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 981.334224] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.334380] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.334630] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 981.334995] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f2fdfb6-8670-49e7-be3d-22362bf1056e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.347047] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 981.347382] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 981.348037] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52780c8f-9901-4748-afde-4245703df23f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.355881] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 981.355881] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5224d292-4483-e6d3-e6b2-ac0e9c0b1ebc" [ 981.355881] env[61978]: _type = "Task" [ 981.355881] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.363653] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5224d292-4483-e6d3-e6b2-ac0e9c0b1ebc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.446065] env[61978]: DEBUG nova.compute.manager [req-7c12689e-8499-4887-82b2-95e697236bb0 req-1417dc0e-a91a-4226-88a1-9334a7e3d393 service nova] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Received event network-vif-deleted-6248c9c9-4f43-44c4-a25a-63b0c9920e89 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 981.447325] env[61978]: DEBUG oslo_concurrency.lockutils [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Releasing lock "refresh_cache-bdfdd685-e440-4f53-b6c4-2ee2f06acba8" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.526715] env[61978]: DEBUG nova.compute.manager [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 981.528012] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e8cb58-3131-4a05-9ff9-621fa31dba37 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.605485] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.765s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.605485] env[61978]: DEBUG nova.compute.manager [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 981.606944] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.113s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.610170] env[61978]: INFO nova.compute.claims [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 981.647023] env[61978]: DEBUG oslo_vmware.api [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Task: {'id': task-1394958, 'name': PowerOffVM_Task, 'duration_secs': 0.210494} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.647023] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 981.647023] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 981.647023] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00cce754-4cae-4702-b95f-f7c8ba37129c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.654935] env[61978]: DEBUG nova.network.neutron [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 981.717858] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 981.718129] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 981.718349] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Deleting the datastore file [datastore2] 32bcb974-8db9-43e2-b397-b497f3a4f30c {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 981.718690] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-29f8c23e-7edd-45e5-a30d-f1d5f2ccbb3c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.728665] env[61978]: DEBUG oslo_vmware.api [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Waiting for the task: (returnval){ [ 981.728665] env[61978]: value = "task-1394960" [ 981.728665] env[61978]: _type = "Task" [ 981.728665] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.747468] env[61978]: DEBUG oslo_vmware.api [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Task: {'id': task-1394960, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.867960] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5224d292-4483-e6d3-e6b2-ac0e9c0b1ebc, 'name': SearchDatastore_Task, 'duration_secs': 0.011476} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.873017] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf34aa4d-a8de-4aba-ac76-b9d6d94675d2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.880637] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 981.880637] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]520aae10-64e2-81b7-dd31-8d96880ea328" [ 981.880637] env[61978]: _type = "Task" [ 981.880637] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.886268] env[61978]: DEBUG nova.network.neutron [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Updating instance_info_cache with network_info: [{"id": "631d15db-8176-407e-8ab9-1b7e5a095d9a", "address": "fa:16:3e:e3:c0:f1", "network": {"id": "6ca2a9c9-ad13-4e1f-9a03-8a2d2f942025", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-354897565-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4ea5d0199a04f959e59a8e134687392", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap631d15db-81", "ovs_interfaceid": "631d15db-8176-407e-8ab9-1b7e5a095d9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.898316] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520aae10-64e2-81b7-dd31-8d96880ea328, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.978873] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 981.978873] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d78c54a9-fca2-428b-88cc-53acdd813b68 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.987355] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 981.987355] env[61978]: value = "task-1394961" [ 981.987355] env[61978]: _type = "Task" [ 981.987355] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.999876] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394961, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.043681] env[61978]: INFO nova.compute.manager [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] instance snapshotting [ 982.046855] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243e2b42-6527-443f-abc1-8f2fb521e494 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.067600] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d139aa-f856-4ae0-92ae-7c83fefc2653 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.115299] env[61978]: DEBUG nova.compute.utils [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 982.117021] env[61978]: DEBUG nova.compute.manager [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 982.117315] env[61978]: DEBUG nova.network.neutron [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 982.187745] env[61978]: DEBUG nova.network.neutron [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Successfully updated port: 7417d7e9-723d-408d-bfa4-e583af757e79 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 982.218212] env[61978]: DEBUG nova.policy [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a4782614e183484d800b1a9fbc19e51b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '252acdf1eb624fbf91eb9e90c011c038', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 982.239889] env[61978]: DEBUG oslo_vmware.api [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Task: {'id': task-1394960, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159988} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.241177] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 982.241404] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 982.241590] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 982.241775] env[61978]: INFO nova.compute.manager [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Took 1.13 seconds to destroy the instance on the hypervisor. [ 982.242047] env[61978]: DEBUG oslo.service.loopingcall [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 982.242253] env[61978]: DEBUG nova.compute.manager [-] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 982.242352] env[61978]: DEBUG nova.network.neutron [-] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 982.388402] env[61978]: DEBUG oslo_concurrency.lockutils [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Releasing lock "refresh_cache-f1001633-e4e5-4de1-8a6b-cf653e43d821" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.388807] env[61978]: DEBUG nova.compute.manager [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Instance network_info: |[{"id": "631d15db-8176-407e-8ab9-1b7e5a095d9a", "address": "fa:16:3e:e3:c0:f1", "network": {"id": "6ca2a9c9-ad13-4e1f-9a03-8a2d2f942025", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-354897565-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "f4ea5d0199a04f959e59a8e134687392", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap631d15db-81", "ovs_interfaceid": "631d15db-8176-407e-8ab9-1b7e5a095d9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 982.396058] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:c0:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f096917-a0cf-4add-a9d2-23ca1c723b3b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '631d15db-8176-407e-8ab9-1b7e5a095d9a', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 982.413018] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Creating folder: Project (f4ea5d0199a04f959e59a8e134687392). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 982.413018] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520aae10-64e2-81b7-dd31-8d96880ea328, 'name': SearchDatastore_Task, 'duration_secs': 0.013335} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.413018] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a036c28-d157-447d-b0c1-90c251becce4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.413018] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.413439] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 92eb5edb-803b-48d4-8c4f-338d7c3b3d13/92eb5edb-803b-48d4-8c4f-338d7c3b3d13.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 982.413503] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4161e040-a906-4dc1-b6ed-bfe97305f42f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.426365] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 982.426365] env[61978]: value = "task-1394963" [ 982.426365] env[61978]: _type = "Task" [ 982.426365] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.431252] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Created folder: Project (f4ea5d0199a04f959e59a8e134687392) in parent group-v295764. [ 982.431371] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Creating folder: Instances. Parent ref: group-v295877. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 982.432069] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-71253d5c-e3e7-4461-89e3-17481d055fef {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.439536] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394963, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.450774] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Created folder: Instances in parent group-v295877. [ 982.451237] env[61978]: DEBUG oslo.service.loopingcall [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 982.451543] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 982.451927] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a1a2a9c-fcd1-4634-8f5a-c65db7de9db1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.476229] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 982.476229] env[61978]: value = "task-1394965" [ 982.476229] env[61978]: _type = "Task" [ 982.476229] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.499595] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394965, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.507116] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394961, 'name': PowerOffVM_Task, 'duration_secs': 0.422415} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.507457] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 982.508737] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8246d791-a9f1-4809-8860-43ca80a5f9f5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.538805] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bdd29ef-6a11-475e-8c0b-c7f95218150f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.546018] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "7e71c8de-1f94-4161-8ad8-a67792c5ce24" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.546018] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "7e71c8de-1f94-4161-8ad8-a67792c5ce24" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.578932] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Creating Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 982.579717] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2e66453d-dbaf-4788-af30-a463c3f4f92a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.586806] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 982.587133] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dca0e5a8-dd4e-4a70-bb7f-7fe3c79ad0f0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.596902] env[61978]: DEBUG oslo_vmware.api [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for the task: (returnval){ [ 982.596902] env[61978]: value = "task-1394966" [ 982.596902] env[61978]: 
_type = "Task" [ 982.596902] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.598726] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 982.598726] env[61978]: value = "task-1394967" [ 982.598726] env[61978]: _type = "Task" [ 982.598726] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.619806] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] VM already powered off {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 982.620381] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 982.620857] env[61978]: DEBUG oslo_concurrency.lockutils [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.622021] env[61978]: DEBUG oslo_concurrency.lockutils [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.622021] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 982.622021] env[61978]: DEBUG oslo_vmware.api [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394966, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.622567] env[61978]: DEBUG nova.compute.manager [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 982.625532] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4d962f1-cc81-4304-9b7f-0f2e36f16e71 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.642984] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 982.642984] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 982.644147] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3efdc40-7095-4357-a738-fd715bac76f2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.656902] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 982.656902] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]527cb5b2-e3b7-a0ce-28cf-9b0ec64a3109" [ 982.656902] env[61978]: _type = "Task" [ 982.656902] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.682465] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]527cb5b2-e3b7-a0ce-28cf-9b0ec64a3109, 'name': SearchDatastore_Task, 'duration_secs': 0.015563} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.687352] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cde6bcf-b325-40da-9f4e-271b9f7052d7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.694888] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.694888] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.694888] env[61978]: DEBUG nova.network.neutron [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 982.700736] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 982.700736] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5277f756-df35-3d67-b042-468ee3750708" [ 982.700736] env[61978]: _type = "Task" [ 982.700736] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.720504] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5277f756-df35-3d67-b042-468ee3750708, 'name': SearchDatastore_Task, 'duration_secs': 0.020548} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.721262] env[61978]: DEBUG oslo_concurrency.lockutils [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.721262] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] bdfdd685-e440-4f53-b6c4-2ee2f06acba8/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk. 
{{(pid=61978) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 982.721537] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0efce18e-713b-48a3-a543-15a7140e8fe5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.736913] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 982.736913] env[61978]: value = "task-1394968" [ 982.736913] env[61978]: _type = "Task" [ 982.736913] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.745566] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394968, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.861083] env[61978]: DEBUG nova.network.neutron [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Successfully created port: fcd64700-31ef-4310-8986-b22e515b1c55 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 982.942026] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394963, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.998420] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.998420] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.998420] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394965, 'name': CreateVM_Task, 'duration_secs': 0.387456} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.998420] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 982.999058] env[61978]: DEBUG oslo_concurrency.lockutils [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.999269] env[61978]: DEBUG oslo_concurrency.lockutils [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.999682] env[61978]: DEBUG oslo_concurrency.lockutils [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 983.000292] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a11649bd-7ac7-4a3e-9e91-ca92b3f9b744 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.011809] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Waiting for the task: (returnval){ [ 983.011809] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e9aade-bc49-5d19-8d8f-71758841ddb2" [ 983.011809] env[61978]: _type = "Task" [ 983.011809] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.024092] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e9aade-bc49-5d19-8d8f-71758841ddb2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.064801] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "adf25af8-28c4-444e-b849-88d643f57dcf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.065394] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "adf25af8-28c4-444e-b849-88d643f57dcf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.109786] env[61978]: DEBUG oslo_vmware.api [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394966, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.254802] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394968, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.297601] env[61978]: DEBUG nova.network.neutron [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 983.409575] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3b93b2-5fa6-4e58-8a02-1ca2b1431426 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.420331] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3331ee70-2169-42d3-9d44-e42da3a9daae {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.463583] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e214d047-7b59-4b57-9a5e-4a893a71d0a3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.470991] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394963, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.543665} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.472063] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 92eb5edb-803b-48d4-8c4f-338d7c3b3d13/92eb5edb-803b-48d4-8c4f-338d7c3b3d13.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 983.472063] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 983.472272] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b15fb3ec-ce82-47d0-a955-afe60cbde592 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.481505] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c13a68-286b-4d1b-9b8d-4281c925f909 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.485436] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 983.485436] env[61978]: value = "task-1394969" [ 983.485436] env[61978]: _type = "Task" [ 983.485436] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.500583] env[61978]: DEBUG nova.compute.provider_tree [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 983.506467] env[61978]: DEBUG nova.compute.manager [req-a5b7e065-0c89-40e6-8b35-401d34b0daae req-34aece05-c49d-4c3b-a6fb-f20a3c148912 service nova] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Received event network-changed-631d15db-8176-407e-8ab9-1b7e5a095d9a {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 983.506886] env[61978]: DEBUG nova.compute.manager [req-a5b7e065-0c89-40e6-8b35-401d34b0daae req-34aece05-c49d-4c3b-a6fb-f20a3c148912 service nova] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Refreshing instance network info cache due to event network-changed-631d15db-8176-407e-8ab9-1b7e5a095d9a. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 983.506964] env[61978]: DEBUG oslo_concurrency.lockutils [req-a5b7e065-0c89-40e6-8b35-401d34b0daae req-34aece05-c49d-4c3b-a6fb-f20a3c148912 service nova] Acquiring lock "refresh_cache-f1001633-e4e5-4de1-8a6b-cf653e43d821" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.507071] env[61978]: DEBUG oslo_concurrency.lockutils [req-a5b7e065-0c89-40e6-8b35-401d34b0daae req-34aece05-c49d-4c3b-a6fb-f20a3c148912 service nova] Acquired lock "refresh_cache-f1001633-e4e5-4de1-8a6b-cf653e43d821" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.507241] env[61978]: DEBUG nova.network.neutron [req-a5b7e065-0c89-40e6-8b35-401d34b0daae req-34aece05-c49d-4c3b-a6fb-f20a3c148912 service nova] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Refreshing network info cache for port 631d15db-8176-407e-8ab9-1b7e5a095d9a {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 983.512544] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394969, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.525247] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e9aade-bc49-5d19-8d8f-71758841ddb2, 'name': SearchDatastore_Task, 'duration_secs': 0.066414} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.525873] env[61978]: DEBUG oslo_concurrency.lockutils [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.526010] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 983.526253] env[61978]: DEBUG oslo_concurrency.lockutils [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.526406] env[61978]: DEBUG oslo_concurrency.lockutils [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.526583] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 983.526852] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3ac6301c-9aeb-490d-9ed1-c8ba4681d78a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.537252] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 983.537447] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 983.538187] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a917d7f9-d485-494d-82dc-8aae9b435ad2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.543922] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Waiting for the task: (returnval){ [ 983.543922] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52027e3d-413e-4d3d-6307-c6abf9bad078" [ 983.543922] env[61978]: _type = "Task" [ 983.543922] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.553172] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52027e3d-413e-4d3d-6307-c6abf9bad078, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.610545] env[61978]: DEBUG oslo_vmware.api [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394966, 'name': CreateSnapshot_Task, 'duration_secs': 0.713515} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.613330] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Created Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 983.614185] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32b0a17-d27d-45ec-9750-1f6c905c6570 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.638734] env[61978]: DEBUG nova.compute.manager [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 983.669250] env[61978]: DEBUG nova.virt.hardware [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T15:05:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='85790f6c-8872-4acd-90a5-40fd0cc369d4',id=39,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1158956347',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 983.669626] env[61978]: DEBUG nova.virt.hardware [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 983.669696] env[61978]: DEBUG nova.virt.hardware [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 983.669849] env[61978]: DEBUG nova.virt.hardware [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 983.670027] env[61978]: DEBUG nova.virt.hardware [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 983.670212] env[61978]: DEBUG nova.virt.hardware [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 983.670432] env[61978]: DEBUG nova.virt.hardware [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 983.670595] env[61978]: DEBUG nova.virt.hardware [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 983.670765] env[61978]: DEBUG nova.virt.hardware 
[None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 983.670930] env[61978]: DEBUG nova.virt.hardware [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 983.671117] env[61978]: DEBUG nova.virt.hardware [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 983.671965] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-458a7a05-156e-48b2-a8d7-9f5c0c77516f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.682174] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afabfe07-9e94-49d4-bb9c-f2d45b71d935 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.719840] env[61978]: DEBUG nova.network.neutron [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Updating instance_info_cache with network_info: [{"id": "7417d7e9-723d-408d-bfa4-e583af757e79", "address": "fa:16:3e:e6:e3:c6", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7417d7e9-72", "ovs_interfaceid": "7417d7e9-723d-408d-bfa4-e583af757e79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.747507] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394968, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.599022} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.747706] env[61978]: INFO nova.virt.vmwareapi.ds_util [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] bdfdd685-e440-4f53-b6c4-2ee2f06acba8/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk. [ 983.748539] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f4ea9a-be37-4a26-8b15-df6ffb60160e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.774411] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] bdfdd685-e440-4f53-b6c4-2ee2f06acba8/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 983.774874] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d431ec2-2f72-4ddc-b250-f6f9ddbc9ba7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.788104] env[61978]: DEBUG nova.network.neutron [-] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.796593] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 983.796593] env[61978]: value = "task-1394970" [ 983.796593] env[61978]: _type = "Task" [ 983.796593] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.805826] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394970, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.999200] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394969, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.303764} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.001482] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 984.002282] env[61978]: DEBUG nova.compute.manager [req-8e4139f5-d9fc-412d-a10c-5048c769a79c req-35603a7d-103b-46ef-a760-ad188c4c7f14 service nova] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Received event network-vif-plugged-7417d7e9-723d-408d-bfa4-e583af757e79 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 984.002531] env[61978]: DEBUG oslo_concurrency.lockutils [req-8e4139f5-d9fc-412d-a10c-5048c769a79c req-35603a7d-103b-46ef-a760-ad188c4c7f14 service nova] Acquiring lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.002770] env[61978]: DEBUG oslo_concurrency.lockutils [req-8e4139f5-d9fc-412d-a10c-5048c769a79c req-35603a7d-103b-46ef-a760-ad188c4c7f14 service nova] Lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.002946] env[61978]: DEBUG oslo_concurrency.lockutils [req-8e4139f5-d9fc-412d-a10c-5048c769a79c req-35603a7d-103b-46ef-a760-ad188c4c7f14 service nova] Lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.003191] env[61978]: DEBUG nova.compute.manager [req-8e4139f5-d9fc-412d-a10c-5048c769a79c req-35603a7d-103b-46ef-a760-ad188c4c7f14 service nova] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] No waiting events found dispatching network-vif-plugged-7417d7e9-723d-408d-bfa4-e583af757e79 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 984.003298] env[61978]: WARNING nova.compute.manager [req-8e4139f5-d9fc-412d-a10c-5048c769a79c req-35603a7d-103b-46ef-a760-ad188c4c7f14 service nova] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Received unexpected event network-vif-plugged-7417d7e9-723d-408d-bfa4-e583af757e79 for instance with vm_state building and task_state spawning. [ 984.003460] env[61978]: DEBUG nova.compute.manager [req-8e4139f5-d9fc-412d-a10c-5048c769a79c req-35603a7d-103b-46ef-a760-ad188c4c7f14 service nova] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Received event network-changed-7417d7e9-723d-408d-bfa4-e583af757e79 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 984.003615] env[61978]: DEBUG nova.compute.manager [req-8e4139f5-d9fc-412d-a10c-5048c769a79c req-35603a7d-103b-46ef-a760-ad188c4c7f14 service nova] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Refreshing instance network info cache due to event network-changed-7417d7e9-723d-408d-bfa4-e583af757e79. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 984.003819] env[61978]: DEBUG oslo_concurrency.lockutils [req-8e4139f5-d9fc-412d-a10c-5048c769a79c req-35603a7d-103b-46ef-a760-ad188c4c7f14 service nova] Acquiring lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.004979] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa6cebd-4ac8-4280-a4dd-1f19d1048844 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.020260] env[61978]: DEBUG nova.scheduler.client.report [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 984.035429] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 92eb5edb-803b-48d4-8c4f-338d7c3b3d13/92eb5edb-803b-48d4-8c4f-338d7c3b3d13.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 984.035751] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d89a12c-36ac-4798-901b-b8d738ad666c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.061529] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52027e3d-413e-4d3d-6307-c6abf9bad078, 'name': SearchDatastore_Task, 'duration_secs': 0.009782} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.063544] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 984.063544] env[61978]: value = "task-1394971" [ 984.063544] env[61978]: _type = "Task" [ 984.063544] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.063831] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdef89a4-7c56-4611-818e-795504452ca2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.076516] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Waiting for the task: (returnval){ [ 984.076516] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52279ee5-2bf5-a578-4883-ec57e01a9844" [ 984.076516] env[61978]: _type = "Task" [ 984.076516] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.076917] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394971, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.087425] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52279ee5-2bf5-a578-4883-ec57e01a9844, 'name': SearchDatastore_Task, 'duration_secs': 0.010583} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.087880] env[61978]: DEBUG oslo_concurrency.lockutils [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.088243] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] f1001633-e4e5-4de1-8a6b-cf653e43d821/f1001633-e4e5-4de1-8a6b-cf653e43d821.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 984.088621] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-87f067aa-6331-4713-82d4-a48d4dcf10e6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.097167] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Waiting for the task: (returnval){ [ 984.097167] env[61978]: value = "task-1394972" [ 984.097167] env[61978]: _type = "Task" [ 984.097167] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.106038] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Task: {'id': task-1394972, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.139767] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Creating linked-clone VM from snapshot {{(pid=61978) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 984.140091] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-44e74f82-5bfa-4f7f-80f3-a8b310a3e38a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.151136] env[61978]: DEBUG oslo_vmware.api [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for the task: (returnval){ [ 984.151136] env[61978]: value = "task-1394973" [ 984.151136] env[61978]: _type = "Task" [ 984.151136] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.161770] env[61978]: DEBUG oslo_vmware.api [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394973, 'name': CloneVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.222423] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.222844] env[61978]: DEBUG nova.compute.manager [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Instance network_info: |[{"id": "7417d7e9-723d-408d-bfa4-e583af757e79", "address": "fa:16:3e:e6:e3:c6", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7417d7e9-72", "ovs_interfaceid": "7417d7e9-723d-408d-bfa4-e583af757e79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 984.223274] env[61978]: DEBUG oslo_concurrency.lockutils [req-8e4139f5-d9fc-412d-a10c-5048c769a79c req-35603a7d-103b-46ef-a760-ad188c4c7f14 service nova] Acquired lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.223604] env[61978]: DEBUG nova.network.neutron [req-8e4139f5-d9fc-412d-a10c-5048c769a79c req-35603a7d-103b-46ef-a760-ad188c4c7f14 service nova] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Refreshing network info cache for port 7417d7e9-723d-408d-bfa4-e583af757e79 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 984.225211] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:e3:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7417d7e9-723d-408d-bfa4-e583af757e79', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 984.234921] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] 
Creating folder: Project (86ad52b551104a2594f1dbbc287f9efa). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 984.239148] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b63bee5d-a589-4d00-b188-a1e71243ac32 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.249804] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Created folder: Project (86ad52b551104a2594f1dbbc287f9efa) in parent group-v295764. [ 984.249804] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Creating folder: Instances. Parent ref: group-v295882. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 984.249804] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-da8f5da2-34c0-4975-b6a9-c1d12afe606d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.264346] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Created folder: Instances in parent group-v295882. [ 984.264613] env[61978]: DEBUG oslo.service.loopingcall [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 984.264933] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 984.265193] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2a75c857-5ec4-4f0f-a931-3feba506fa1c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.290551] env[61978]: INFO nova.compute.manager [-] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Took 2.05 seconds to deallocate network for instance. [ 984.294265] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 984.294265] env[61978]: value = "task-1394976" [ 984.294265] env[61978]: _type = "Task" [ 984.294265] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.311275] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394970, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.314603] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394976, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.537550] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.930s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.538101] env[61978]: DEBUG nova.compute.manager [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 984.541620] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.512s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.545025] env[61978]: INFO nova.compute.claims [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 984.589056] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394971, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.612836] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Task: {'id': task-1394972, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.665549] env[61978]: DEBUG oslo_vmware.api [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394973, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.671100] env[61978]: DEBUG nova.network.neutron [req-a5b7e065-0c89-40e6-8b35-401d34b0daae req-34aece05-c49d-4c3b-a6fb-f20a3c148912 service nova] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Updated VIF entry in instance network info cache for port 631d15db-8176-407e-8ab9-1b7e5a095d9a. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 984.671682] env[61978]: DEBUG nova.network.neutron [req-a5b7e065-0c89-40e6-8b35-401d34b0daae req-34aece05-c49d-4c3b-a6fb-f20a3c148912 service nova] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Updating instance_info_cache with network_info: [{"id": "631d15db-8176-407e-8ab9-1b7e5a095d9a", "address": "fa:16:3e:e3:c0:f1", "network": {"id": "6ca2a9c9-ad13-4e1f-9a03-8a2d2f942025", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-354897565-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4ea5d0199a04f959e59a8e134687392", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap631d15db-81", "ovs_interfaceid": "631d15db-8176-407e-8ab9-1b7e5a095d9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.807198] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.813403] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394976, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.816955] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394970, 'name': ReconfigVM_Task, 'duration_secs': 0.830359} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.817314] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Reconfigured VM instance instance-00000028 to attach disk [datastore2] bdfdd685-e440-4f53-b6c4-2ee2f06acba8/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 984.818308] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72542636-69c5-486f-9be9-458595c632d4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.849474] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a8bd9f9-4e44-43da-b285-3f30b3a34032 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.867197] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 984.867197] env[61978]: value = "task-1394977" [ 984.867197] env[61978]: _type = "Task" [ 984.867197] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.879877] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394977, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.934779] env[61978]: DEBUG nova.network.neutron [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Successfully updated port: fcd64700-31ef-4310-8986-b22e515b1c55 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 985.053264] env[61978]: DEBUG nova.compute.utils [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 985.055206] env[61978]: DEBUG nova.compute.manager [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 985.055750] env[61978]: DEBUG nova.network.neutron [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 985.079908] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394971, 'name': ReconfigVM_Task, 'duration_secs': 0.547285} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.082349] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 92eb5edb-803b-48d4-8c4f-338d7c3b3d13/92eb5edb-803b-48d4-8c4f-338d7c3b3d13.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 985.083325] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5e3019d2-fbbc-40fe-a113-585c69568ccc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.092035] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 985.092035] env[61978]: value = "task-1394978" [ 985.092035] env[61978]: _type = "Task" [ 985.092035] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.105237] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394978, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.111306] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Task: {'id': task-1394972, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.592158} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.112426] env[61978]: DEBUG nova.network.neutron [req-8e4139f5-d9fc-412d-a10c-5048c769a79c req-35603a7d-103b-46ef-a760-ad188c4c7f14 service nova] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Updated VIF entry in instance network info cache for port 7417d7e9-723d-408d-bfa4-e583af757e79. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 985.112793] env[61978]: DEBUG nova.network.neutron [req-8e4139f5-d9fc-412d-a10c-5048c769a79c req-35603a7d-103b-46ef-a760-ad188c4c7f14 service nova] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Updating instance_info_cache with network_info: [{"id": "7417d7e9-723d-408d-bfa4-e583af757e79", "address": "fa:16:3e:e6:e3:c6", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7417d7e9-72", "ovs_interfaceid": "7417d7e9-723d-408d-bfa4-e583af757e79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.114498] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] f1001633-e4e5-4de1-8a6b-cf653e43d821/f1001633-e4e5-4de1-8a6b-cf653e43d821.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 985.114788] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 985.115315] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f52b5379-a80f-4325-93cc-da54892febe4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.125082] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Waiting for the task: (returnval){ [ 985.125082] env[61978]: value = "task-1394979" [ 985.125082] env[61978]: _type = "Task" [ 985.125082] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.144353] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Task: {'id': task-1394979, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.145945] env[61978]: DEBUG nova.policy [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b47b4554d1464808bc7810b31190b43a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '894db968242b469b87d1e9084d7c1dd0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 985.162112] env[61978]: DEBUG oslo_vmware.api [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394973, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.175243] env[61978]: DEBUG oslo_concurrency.lockutils [req-a5b7e065-0c89-40e6-8b35-401d34b0daae req-34aece05-c49d-4c3b-a6fb-f20a3c148912 service nova] Releasing lock "refresh_cache-f1001633-e4e5-4de1-8a6b-cf653e43d821" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.308322] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394976, 'name': CreateVM_Task, 'duration_secs': 0.547059} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.308490] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 985.309195] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.309360] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.309700] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 985.310031] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c56048f2-4778-43bd-8116-65afe124bad8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.317509] env[61978]: DEBUG oslo_vmware.api [None 
req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 985.317509] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52138885-065e-4ac6-a226-47b6ad2103f8" [ 985.317509] env[61978]: _type = "Task" [ 985.317509] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.330023] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52138885-065e-4ac6-a226-47b6ad2103f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.379175] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394977, 'name': ReconfigVM_Task, 'duration_secs': 0.351393} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.379175] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 985.379568] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-730b0192-d2fc-43f2-ba42-042ec81c6b5a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.387535] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 985.387535] env[61978]: value = "task-1394980" [ 985.387535] env[61978]: _type = "Task" [ 985.387535] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.399062] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394980, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.438510] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "refresh_cache-f930ab49-c215-4b2e-92b1-21c0d52a70eb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.441011] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquired lock "refresh_cache-f930ab49-c215-4b2e-92b1-21c0d52a70eb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.441011] env[61978]: DEBUG nova.network.neutron [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 985.564733] env[61978]: DEBUG nova.compute.manager [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 985.611662] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394978, 'name': Rename_Task, 'duration_secs': 0.268996} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.612162] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 985.612557] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e764735c-7f06-499b-9430-8f3eeeb0c7b7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.618466] env[61978]: DEBUG oslo_concurrency.lockutils [req-8e4139f5-d9fc-412d-a10c-5048c769a79c req-35603a7d-103b-46ef-a760-ad188c4c7f14 service nova] Releasing lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.618745] env[61978]: DEBUG nova.compute.manager [req-8e4139f5-d9fc-412d-a10c-5048c769a79c req-35603a7d-103b-46ef-a760-ad188c4c7f14 service nova] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Received event network-vif-deleted-a33524bd-7627-49fa-ab70-55b0962b8ca3 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 985.621229] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 985.621229] env[61978]: value = "task-1394981" [ 985.621229] env[61978]: _type = "Task" [ 985.621229] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.637023] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394981, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.642546] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Task: {'id': task-1394979, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.168973} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.642812] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 985.644258] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e48df31-257a-4870-913d-e79c941d7800 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.680519] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] f1001633-e4e5-4de1-8a6b-cf653e43d821/f1001633-e4e5-4de1-8a6b-cf653e43d821.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 985.684115] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ede4541a-bbdb-486b-8f86-925c53684c7d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.708312] env[61978]: DEBUG oslo_vmware.api [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394973, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.710512] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Waiting for the task: (returnval){ [ 985.710512] env[61978]: value = "task-1394982" [ 985.710512] env[61978]: _type = "Task" [ 985.710512] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.724021] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Task: {'id': task-1394982, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.725495] env[61978]: DEBUG nova.network.neutron [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Successfully created port: 2c83f5d8-5390-4c18-a494-54cb1cee93e4 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 985.841656] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52138885-065e-4ac6-a226-47b6ad2103f8, 'name': SearchDatastore_Task, 'duration_secs': 0.012165} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.843225] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.843894] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 985.843894] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.844199] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.844199] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 985.845565] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c13cea5e-80bc-4636-8789-d7b9e83865cf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.856960] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 985.857207] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 985.858093] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a0301e5-0585-44ad-a1d1-25c9cdd592e6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.871202] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 985.871202] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a62885-c7a6-56a7-4249-4c39e4be7902" [ 985.871202] env[61978]: _type = "Task" [ 985.871202] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.882613] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a62885-c7a6-56a7-4249-4c39e4be7902, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.901928] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394980, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.018254] env[61978]: DEBUG nova.network.neutron [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 986.145697] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394981, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.169047] env[61978]: DEBUG oslo_vmware.api [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394973, 'name': CloneVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.221440] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Task: {'id': task-1394982, 'name': ReconfigVM_Task, 'duration_secs': 0.431198} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.224274] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Reconfigured VM instance instance-00000029 to attach disk [datastore2] f1001633-e4e5-4de1-8a6b-cf653e43d821/f1001633-e4e5-4de1-8a6b-cf653e43d821.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 986.224977] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-29664a40-008c-4c13-936b-a9305fef91b0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.235361] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Waiting for the task: (returnval){ [ 986.235361] env[61978]: value = "task-1394983" [ 986.235361] env[61978]: _type = "Task" [ 986.235361] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.245292] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Task: {'id': task-1394983, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.293581] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd260d0-e0c7-4839-9a7b-7f8a4aff2577 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.302329] env[61978]: DEBUG nova.compute.manager [req-a16a8205-52c4-4283-b291-5b87ad786bfa req-a6b13ea4-b6bd-42ae-a6e3-7d66e1551226 service nova] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Received event network-vif-plugged-fcd64700-31ef-4310-8986-b22e515b1c55 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 986.302748] env[61978]: DEBUG oslo_concurrency.lockutils [req-a16a8205-52c4-4283-b291-5b87ad786bfa req-a6b13ea4-b6bd-42ae-a6e3-7d66e1551226 service nova] Acquiring lock "f930ab49-c215-4b2e-92b1-21c0d52a70eb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.302972] env[61978]: DEBUG oslo_concurrency.lockutils [req-a16a8205-52c4-4283-b291-5b87ad786bfa req-a6b13ea4-b6bd-42ae-a6e3-7d66e1551226 service nova] Lock "f930ab49-c215-4b2e-92b1-21c0d52a70eb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.303258] env[61978]: DEBUG oslo_concurrency.lockutils [req-a16a8205-52c4-4283-b291-5b87ad786bfa req-a6b13ea4-b6bd-42ae-a6e3-7d66e1551226 service nova] Lock "f930ab49-c215-4b2e-92b1-21c0d52a70eb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.303530] env[61978]: DEBUG nova.compute.manager [req-a16a8205-52c4-4283-b291-5b87ad786bfa req-a6b13ea4-b6bd-42ae-a6e3-7d66e1551226 service nova] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] No waiting events found dispatching network-vif-plugged-fcd64700-31ef-4310-8986-b22e515b1c55 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 986.303784] env[61978]: WARNING nova.compute.manager [req-a16a8205-52c4-4283-b291-5b87ad786bfa req-a6b13ea4-b6bd-42ae-a6e3-7d66e1551226 service nova] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Received unexpected event network-vif-plugged-fcd64700-31ef-4310-8986-b22e515b1c55 for instance with vm_state building and task_state spawning. [ 986.304026] env[61978]: DEBUG nova.compute.manager [req-a16a8205-52c4-4283-b291-5b87ad786bfa req-a6b13ea4-b6bd-42ae-a6e3-7d66e1551226 service nova] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Received event network-changed-fcd64700-31ef-4310-8986-b22e515b1c55 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 986.304252] env[61978]: DEBUG nova.compute.manager [req-a16a8205-52c4-4283-b291-5b87ad786bfa req-a6b13ea4-b6bd-42ae-a6e3-7d66e1551226 service nova] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Refreshing instance network info cache due to event network-changed-fcd64700-31ef-4310-8986-b22e515b1c55. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 986.304493] env[61978]: DEBUG oslo_concurrency.lockutils [req-a16a8205-52c4-4283-b291-5b87ad786bfa req-a6b13ea4-b6bd-42ae-a6e3-7d66e1551226 service nova] Acquiring lock "refresh_cache-f930ab49-c215-4b2e-92b1-21c0d52a70eb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.308717] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aafd3463-ef89-4341-8051-ef5917de2465 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.312950] env[61978]: DEBUG nova.network.neutron [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Updating instance_info_cache with network_info: [{"id": "fcd64700-31ef-4310-8986-b22e515b1c55", "address": "fa:16:3e:a7:22:5a", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd64700-31", "ovs_interfaceid": "fcd64700-31ef-4310-8986-b22e515b1c55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.350932] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cff907b0-2561-4251-9e09-2792f11b7c09 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.360544] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3304e543-4e41-4834-b9f2-11c9e43dc852 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.377961] env[61978]: DEBUG nova.compute.provider_tree [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.390351] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a62885-c7a6-56a7-4249-4c39e4be7902, 'name': SearchDatastore_Task, 'duration_secs': 0.018258} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.394895] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38d90704-4c06-451e-be7b-5b500ab6db9d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.404013] env[61978]: DEBUG oslo_vmware.api [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1394980, 'name': PowerOnVM_Task, 'duration_secs': 0.870506} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.405568] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 986.406915] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 986.406915] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ea5665-bf97-6fcb-13e9-519f98172431" [ 986.406915] env[61978]: _type = "Task" [ 986.406915] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.412261] env[61978]: DEBUG nova.compute.manager [None req-76298257-e7c8-420e-bc8c-040bac69dba0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 986.413393] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cceda146-274c-42a9-9d4d-5f6b94acd605 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.421804] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ea5665-bf97-6fcb-13e9-519f98172431, 'name': SearchDatastore_Task, 'duration_secs': 0.010745} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.421804] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.421804] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c/59f32dd0-1faa-4059-9ef3-b177e8f4fa4c.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 986.421804] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9e2ce17-956b-4de0-b272-3d356b05d019 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.435170] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 986.435170] env[61978]: value = "task-1394984" [ 986.435170] env[61978]: _type = "Task" [ 986.435170] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.441296] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1394984, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.580843] env[61978]: DEBUG nova.compute.manager [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 986.617600] env[61978]: DEBUG nova.virt.hardware [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 986.617857] env[61978]: DEBUG nova.virt.hardware [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 986.618226] env[61978]: DEBUG nova.virt.hardware [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 986.618508] env[61978]: DEBUG nova.virt.hardware [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 986.618666] env[61978]: DEBUG nova.virt.hardware [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 986.619386] env[61978]: DEBUG nova.virt.hardware [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 986.619647] env[61978]: DEBUG nova.virt.hardware [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 986.619820] env[61978]: DEBUG nova.virt.hardware [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 986.620046] env[61978]: DEBUG nova.virt.hardware [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 986.620201] env[61978]: DEBUG nova.virt.hardware [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 986.620408] env[61978]: DEBUG nova.virt.hardware [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 986.621333] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d199f86f-6130-4496-b9d7-c8e3b2f69fa3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.634332] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73371b86-21df-445d-bdcb-dd91e07d6dd1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.641631] env[61978]: DEBUG oslo_vmware.api [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1394981, 'name': PowerOnVM_Task, 'duration_secs': 0.575425} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.642524] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 986.642831] env[61978]: DEBUG nova.compute.manager [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 986.643846] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d59b81f1-2222-422c-83ed-3aeda94d4328 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.681617] env[61978]: DEBUG oslo_vmware.api [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1394973, 'name': CloneVM_Task, 'duration_secs': 2.02047} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.681617] env[61978]: INFO nova.virt.vmwareapi.vmops [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Created linked-clone VM from snapshot [ 986.684324] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb4f46a-0224-451b-8a9c-ea563271b0b1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.695806] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Uploading image 8bd7f71b-72a1-4c54-9f6c-3a3eaaf8b233 {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 986.731141] env[61978]: DEBUG oslo_vmware.rw_handles [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 986.731141] env[61978]: value = "vm-295881" [ 986.731141] env[61978]: _type = "VirtualMachine" [ 986.731141] env[61978]: }. 
{{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 986.731891] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e4387555-aeae-4d2b-b3e5-b91a1d5e3583 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.746346] env[61978]: DEBUG oslo_vmware.rw_handles [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Lease: (returnval){ [ 986.746346] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c1bd39-979b-0a3b-9f07-185bfbb1697d" [ 986.746346] env[61978]: _type = "HttpNfcLease" [ 986.746346] env[61978]: } obtained for exporting VM: (result){ [ 986.746346] env[61978]: value = "vm-295881" [ 986.746346] env[61978]: _type = "VirtualMachine" [ 986.746346] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 986.746346] env[61978]: DEBUG oslo_vmware.api [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for the lease: (returnval){ [ 986.746346] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c1bd39-979b-0a3b-9f07-185bfbb1697d" [ 986.746346] env[61978]: _type = "HttpNfcLease" [ 986.746346] env[61978]: } to be ready. {{(pid=61978) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 986.749113] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Task: {'id': task-1394983, 'name': Rename_Task, 'duration_secs': 0.327745} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.752823] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 986.753190] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d433a0c-7b09-4aff-95e5-659d4841df85 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.761104] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 986.761104] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c1bd39-979b-0a3b-9f07-185bfbb1697d" [ 986.761104] env[61978]: _type = "HttpNfcLease" [ 986.761104] env[61978]: } is ready. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 986.761353] env[61978]: DEBUG oslo_vmware.rw_handles [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 986.761353] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c1bd39-979b-0a3b-9f07-185bfbb1697d" [ 986.761353] env[61978]: _type = "HttpNfcLease" [ 986.761353] env[61978]: }. 
{{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 986.762316] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f106877d-d6ce-4159-b73a-4658e3e37e88 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.767632] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Waiting for the task: (returnval){ [ 986.767632] env[61978]: value = "task-1394986" [ 986.767632] env[61978]: _type = "Task" [ 986.767632] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.775231] env[61978]: DEBUG oslo_vmware.rw_handles [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f9e237-1ba9-4857-3f3c-c4ee9839d2c8/disk-0.vmdk from lease info. {{(pid=61978) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 986.775448] env[61978]: DEBUG oslo_vmware.rw_handles [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f9e237-1ba9-4857-3f3c-c4ee9839d2c8/disk-0.vmdk for reading. {{(pid=61978) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 986.841680] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Releasing lock "refresh_cache-f930ab49-c215-4b2e-92b1-21c0d52a70eb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.842182] env[61978]: DEBUG nova.compute.manager [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Instance network_info: |[{"id": "fcd64700-31ef-4310-8986-b22e515b1c55", "address": "fa:16:3e:a7:22:5a", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd64700-31", "ovs_interfaceid": "fcd64700-31ef-4310-8986-b22e515b1c55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 986.846394] env[61978]: DEBUG oslo_concurrency.lockutils [req-a16a8205-52c4-4283-b291-5b87ad786bfa req-a6b13ea4-b6bd-42ae-a6e3-7d66e1551226 service nova] Acquired lock "refresh_cache-f930ab49-c215-4b2e-92b1-21c0d52a70eb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.846625] env[61978]: DEBUG nova.network.neutron [req-a16a8205-52c4-4283-b291-5b87ad786bfa req-a6b13ea4-b6bd-42ae-a6e3-7d66e1551226 service nova] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Refreshing network info cache for port fcd64700-31ef-4310-8986-b22e515b1c55 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 986.848844] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a7:22:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27138a4c-60c9-45fb-bf37-4c2f765315a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fcd64700-31ef-4310-8986-b22e515b1c55', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 986.856554] env[61978]: DEBUG oslo.service.loopingcall [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 986.857224] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Task: {'id': task-1394986, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.859767] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 986.860417] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bef8e7a8-3b02-4f62-be49-024cab89c979 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.884604] env[61978]: DEBUG nova.scheduler.client.report [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 986.888148] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 986.888148] env[61978]: value = "task-1394987" [ 986.888148] env[61978]: _type = "Task" [ 986.888148] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.902404] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394987, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.922724] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-70096f03-8276-46a8-a681-7eddaa56155d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.943625] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1394984, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49451} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.943903] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c/59f32dd0-1faa-4059-9ef3-b177e8f4fa4c.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 986.944145] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 986.944421] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0d6a2000-039d-434b-8335-a447113ee192 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.954271] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 986.954271] env[61978]: value = "task-1394988" [ 986.954271] env[61978]: _type = "Task" [ 986.954271] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.969339] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1394988, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.181399] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.281275] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Task: {'id': task-1394986, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.395348] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.854s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.397877] env[61978]: DEBUG nova.compute.manager [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 987.399201] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.329s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.402138] env[61978]: INFO nova.compute.claims [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 987.413288] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394987, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.470638] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1394988, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080467} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.471302] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 987.472917] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f53bf069-9907-489c-b218-08e5676b5b24 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.514401] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c/59f32dd0-1faa-4059-9ef3-b177e8f4fa4c.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 987.518164] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4de1512d-013d-409f-a747-71d74488dc0b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.549504] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 987.549504] env[61978]: value = "task-1394989" [ 987.549504] env[61978]: _type = "Task" [ 987.549504] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.559651] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1394989, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.606423] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.606628] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.692813] env[61978]: DEBUG nova.network.neutron [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Successfully updated port: 2c83f5d8-5390-4c18-a494-54cb1cee93e4 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 987.703029] env[61978]: DEBUG nova.compute.manager [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 987.703707] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b33690b-5032-4da9-a939-68dcc85ec33d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.773853] env[61978]: DEBUG nova.network.neutron [req-a16a8205-52c4-4283-b291-5b87ad786bfa req-a6b13ea4-b6bd-42ae-a6e3-7d66e1551226 service nova] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Updated VIF entry in instance network info cache for port fcd64700-31ef-4310-8986-b22e515b1c55. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 987.775775] env[61978]: DEBUG nova.network.neutron [req-a16a8205-52c4-4283-b291-5b87ad786bfa req-a6b13ea4-b6bd-42ae-a6e3-7d66e1551226 service nova] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Updating instance_info_cache with network_info: [{"id": "fcd64700-31ef-4310-8986-b22e515b1c55", "address": "fa:16:3e:a7:22:5a", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd64700-31", "ovs_interfaceid": "fcd64700-31ef-4310-8986-b22e515b1c55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.788678] env[61978]: DEBUG oslo_vmware.api [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Task: {'id': task-1394986, 'name': PowerOnVM_Task, 'duration_secs': 0.70878} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.789065] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 987.789316] env[61978]: INFO nova.compute.manager [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Took 9.60 seconds to spawn the instance on the hypervisor. [ 987.789508] env[61978]: DEBUG nova.compute.manager [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 987.790556] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e065b5-6a9a-4922-b3e8-0c084dae3ee8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.902756] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394987, 'name': CreateVM_Task, 'duration_secs': 0.589797} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.903106] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 987.903949] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.904219] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.904811] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 987.905577] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4aa9968-da8b-4293-b90d-fcb682384dcb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.908855] env[61978]: DEBUG nova.compute.utils [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 987.914127] env[61978]: DEBUG nova.compute.manager [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 987.914288] env[61978]: DEBUG nova.network.neutron [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 987.921929] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 987.921929] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52569f53-18d5-28a7-2441-57a371f5792c" [ 987.921929] env[61978]: _type = "Task" [ 987.921929] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.932936] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52569f53-18d5-28a7-2441-57a371f5792c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.999534] env[61978]: DEBUG nova.policy [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b47b4554d1464808bc7810b31190b43a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '894db968242b469b87d1e9084d7c1dd0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 988.042318] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "ea1c2d74-70b4-4547-a887-78e291c3082a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.043166] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "ea1c2d74-70b4-4547-a887-78e291c3082a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.043166] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "ea1c2d74-70b4-4547-a887-78e291c3082a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.043166] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "ea1c2d74-70b4-4547-a887-78e291c3082a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.043694] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "ea1c2d74-70b4-4547-a887-78e291c3082a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.046483] env[61978]: INFO nova.compute.manager [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Terminating instance [ 988.049080] env[61978]: DEBUG nova.compute.manager [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 988.049495] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 988.050592] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f4cc2fe-cdb1-4537-82f3-43aa05bf8815 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.072312] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1394989, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.072822] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 988.073230] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4e2339b3-7372-4206-b24e-699d1bb48220 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.080830] env[61978]: DEBUG oslo_vmware.api [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 988.080830] env[61978]: value = "task-1394990" [ 988.080830] env[61978]: _type = "Task" [ 988.080830] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.090441] env[61978]: DEBUG oslo_vmware.api [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394990, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.111573] env[61978]: DEBUG nova.compute.utils [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 988.197654] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquiring lock "refresh_cache-b356fc81-f857-4416-8eb0-28c66d137967" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.198026] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquired lock "refresh_cache-b356fc81-f857-4416-8eb0-28c66d137967" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.198857] env[61978]: DEBUG nova.network.neutron [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 988.223940] env[61978]: INFO nova.compute.manager [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] instance snapshotting [ 988.223940] env[61978]: DEBUG nova.objects.instance [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lazy-loading 'flavor' on Instance uuid c17c986e-c008-4414-8dd1-4ea836458048 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 988.279905] env[61978]: DEBUG oslo_concurrency.lockutils [req-a16a8205-52c4-4283-b291-5b87ad786bfa req-a6b13ea4-b6bd-42ae-a6e3-7d66e1551226 service nova] Releasing lock "refresh_cache-f930ab49-c215-4b2e-92b1-21c0d52a70eb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.315324] env[61978]: INFO nova.compute.manager [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Took 39.26 seconds to build instance. [ 988.412637] env[61978]: DEBUG nova.compute.manager [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 988.435492] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52569f53-18d5-28a7-2441-57a371f5792c, 'name': SearchDatastore_Task, 'duration_secs': 0.015781} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.436072] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.436336] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 988.436991] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.437213] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.437663] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 988.438092] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-56dcf816-3c83-4a86-9dde-f16e76542ce3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.452436] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 988.454075] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 988.454075] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb8327f3-bfad-4a10-9312-d750404d56f4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.462014] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 988.462014] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52600a51-f034-8280-3909-b5edcb5a73a3" [ 988.462014] env[61978]: _type = "Task" [ 988.462014] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.480094] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52600a51-f034-8280-3909-b5edcb5a73a3, 'name': SearchDatastore_Task, 'duration_secs': 0.015518} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.481389] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d65f124-3ab8-4335-bde7-8fa54b14f2c3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.493189] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 988.493189] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5275853b-4ad4-4b76-7ac2-68731fafe850" [ 988.493189] env[61978]: _type = "Task" [ 988.493189] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.508223] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5275853b-4ad4-4b76-7ac2-68731fafe850, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.564102] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1394989, 'name': ReconfigVM_Task, 'duration_secs': 0.723553} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.564634] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c/59f32dd0-1faa-4059-9ef3-b177e8f4fa4c.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 988.565446] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ea9fd6f1-0439-493b-befd-eff4750c4379 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.577027] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 988.577027] env[61978]: value = "task-1394991" [ 988.577027] env[61978]: _type = "Task" [ 988.577027] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.591532] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1394991, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.601027] env[61978]: DEBUG oslo_vmware.api [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394990, 'name': PowerOffVM_Task, 'duration_secs': 0.263031} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.601645] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 988.602357] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 988.602538] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-df672a87-198e-42b3-bb6f-bf0669be2908 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.621596] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.015s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.691678] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 988.691936] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 988.692165] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Deleting the datastore file [datastore1] ea1c2d74-70b4-4547-a887-78e291c3082a {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 988.692430] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a02be2d9-beeb-44f4-b875-57258abfeaf0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.700365] env[61978]: DEBUG oslo_vmware.api [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 988.700365] env[61978]: value = "task-1394993" [ 988.700365] env[61978]: _type = "Task" [ 988.700365] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.716605] env[61978]: DEBUG oslo_vmware.api [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394993, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.729252] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df9b3577-f099-4d58-96ad-28f6bad333d3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.763712] env[61978]: DEBUG nova.network.neutron [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 988.766888] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c54a93-46d9-4b77-beed-c4e3f0eb9d77 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.812912] env[61978]: DEBUG nova.network.neutron [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Successfully created port: d9bef177-ead0-4f65-8781-806d6a34ea1d {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 988.824018] env[61978]: DEBUG oslo_concurrency.lockutils [None req-006f19a5-6e7a-4a9f-9377-d20c4b79d565 tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Lock "f1001633-e4e5-4de1-8a6b-cf653e43d821" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.619s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.951321] env[61978]: DEBUG nova.network.neutron [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Updating instance_info_cache with network_info: [{"id": "2c83f5d8-5390-4c18-a494-54cb1cee93e4", "address": "fa:16:3e:9a:4a:4b", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.73", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c83f5d8-53", "ovs_interfaceid": 
"2c83f5d8-5390-4c18-a494-54cb1cee93e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.006038] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5275853b-4ad4-4b76-7ac2-68731fafe850, 'name': SearchDatastore_Task, 'duration_secs': 0.016441} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.009231] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.009555] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] f930ab49-c215-4b2e-92b1-21c0d52a70eb/f930ab49-c215-4b2e-92b1-21c0d52a70eb.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 989.010935] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a291d7da-40c6-49f0-be04-bf62f5b22296 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.016051] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquiring lock "96bef3f3-a45c-43ba-a86a-66c1d5686ea6" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.016192] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "96bef3f3-a45c-43ba-a86a-66c1d5686ea6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.021854] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 989.021854] env[61978]: value = "task-1394994" [ 989.021854] env[61978]: _type = "Task" [ 989.021854] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.023741] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7f3620-a7ae-423c-941a-5728bdfbff2c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.041315] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f2d357e-bb07-40d5-9d3d-7b2912260306 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.045975] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394994, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.077461] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67bf0712-8d8c-4d59-8cd3-e63e481741fc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.094626] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e10cd726-3a49-4016-b2b0-6d2c79f784fa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.099856] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1394991, 'name': Rename_Task, 'duration_secs': 0.280164} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.100268] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 989.101040] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4df57806-3d16-495e-bc2a-0d61c79e6739 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.112721] env[61978]: DEBUG nova.compute.provider_tree [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.116586] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 989.116586] env[61978]: value = "task-1394995" [ 989.116586] env[61978]: _type = "Task" [ 989.116586] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.132472] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1394995, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.214639] env[61978]: DEBUG oslo_vmware.api [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1394993, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256334} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.215464] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 989.216528] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 989.216528] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 989.216528] env[61978]: INFO nova.compute.manager [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Took 1.17 seconds to destroy the instance on the hypervisor. [ 989.216528] env[61978]: DEBUG oslo.service.loopingcall [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 989.216840] env[61978]: DEBUG nova.compute.manager [-] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 989.216945] env[61978]: DEBUG nova.network.neutron [-] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 989.281822] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Creating Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 989.282546] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8a9e45fa-822e-44d2-b2e1-81611b13787b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.296496] env[61978]: DEBUG oslo_vmware.api [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 989.296496] env[61978]: value = "task-1394996" [ 989.296496] env[61978]: _type = "Task" [ 989.296496] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.307200] env[61978]: DEBUG oslo_vmware.api [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1394996, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.326050] env[61978]: DEBUG nova.compute.manager [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 989.427612] env[61978]: DEBUG nova.compute.manager [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 989.453971] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Releasing lock "refresh_cache-b356fc81-f857-4416-8eb0-28c66d137967" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.453971] env[61978]: DEBUG nova.compute.manager [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Instance network_info: |[{"id": "2c83f5d8-5390-4c18-a494-54cb1cee93e4", "address": "fa:16:3e:9a:4a:4b", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.73", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c83f5d8-53", "ovs_interfaceid": "2c83f5d8-5390-4c18-a494-54cb1cee93e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 989.454505] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:4a:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27138a4c-60c9-45fb-bf37-4c2f765315a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c83f5d8-5390-4c18-a494-54cb1cee93e4', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 989.463598] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Creating folder: Project (894db968242b469b87d1e9084d7c1dd0). Parent ref: group-v295764. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 989.463968] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dd45f139-7450-4a53-9591-58efc5df6ac6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.469283] env[61978]: DEBUG nova.virt.hardware [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 989.469283] env[61978]: DEBUG nova.virt.hardware [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 989.469283] env[61978]: DEBUG nova.virt.hardware [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 989.469699] env[61978]: DEBUG nova.virt.hardware [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 989.469699] env[61978]: DEBUG nova.virt.hardware [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 989.469699] env[61978]: DEBUG nova.virt.hardware [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 989.469896] env[61978]: DEBUG nova.virt.hardware [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 989.470717] env[61978]: DEBUG nova.virt.hardware [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 
tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 989.470717] env[61978]: DEBUG nova.virt.hardware [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 989.470717] env[61978]: DEBUG nova.virt.hardware [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 989.470717] env[61978]: DEBUG nova.virt.hardware [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 989.471936] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cea8b134-2874-453c-931e-d8a874a0f819 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.483340] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f9e436c-3426-4440-80cb-0efb9d407fc8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.489456] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Created folder: Project (894db968242b469b87d1e9084d7c1dd0) in parent group-v295764. [ 989.489697] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Creating folder: Instances. Parent ref: group-v295886. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 989.490510] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f6501dd-4f8f-4e31-a09c-b7182639549e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.506511] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Created folder: Instances in parent group-v295886. [ 989.506962] env[61978]: DEBUG oslo.service.loopingcall [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 989.507331] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 989.507630] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-016f7b24-fe0f-4ada-8dae-c4f5733d5cf4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.523883] env[61978]: DEBUG nova.compute.utils [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 989.547460] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 989.547460] env[61978]: value = "task-1394999" [ 989.547460] env[61978]: _type = "Task" [ 989.547460] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.551537] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394994, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.565441] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394999, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.619032] env[61978]: DEBUG nova.scheduler.client.report [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 990.450654] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.450938] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.451197] env[61978]: INFO nova.compute.manager [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 
tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Attaching volume 229fb92f-4196-4c02-99cd-5cfa5c710c35 to /dev/sdb [ 990.456570] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "96bef3f3-a45c-43ba-a86a-66c1d5686ea6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.440s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.457150] env[61978]: DEBUG nova.network.neutron [-] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.458503] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.059s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.459347] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1394995, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.459568] env[61978]: WARNING oslo_vmware.common.loopingcall [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] task run outlasted interval by 0.335267 sec [ 990.463190] env[61978]: DEBUG nova.compute.manager [req-6a953565-42f0-4de3-93e1-670149ef9854 req-5a3084ed-1f69-40f2-85b4-0357d95d36ed service nova] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Received event network-vif-plugged-2c83f5d8-5390-4c18-a494-54cb1cee93e4 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 990.463494] env[61978]: DEBUG oslo_concurrency.lockutils [req-6a953565-42f0-4de3-93e1-670149ef9854 req-5a3084ed-1f69-40f2-85b4-0357d95d36ed service nova] Acquiring lock "b356fc81-f857-4416-8eb0-28c66d137967-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.463616] env[61978]: DEBUG oslo_concurrency.lockutils [req-6a953565-42f0-4de3-93e1-670149ef9854 req-5a3084ed-1f69-40f2-85b4-0357d95d36ed service nova] Lock "b356fc81-f857-4416-8eb0-28c66d137967-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.463784] env[61978]: DEBUG oslo_concurrency.lockutils [req-6a953565-42f0-4de3-93e1-670149ef9854 req-5a3084ed-1f69-40f2-85b4-0357d95d36ed service nova] Lock "b356fc81-f857-4416-8eb0-28c66d137967-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.463960] 
env[61978]: DEBUG nova.compute.manager [req-6a953565-42f0-4de3-93e1-670149ef9854 req-5a3084ed-1f69-40f2-85b4-0357d95d36ed service nova] [instance: b356fc81-f857-4416-8eb0-28c66d137967] No waiting events found dispatching network-vif-plugged-2c83f5d8-5390-4c18-a494-54cb1cee93e4 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 990.464143] env[61978]: WARNING nova.compute.manager [req-6a953565-42f0-4de3-93e1-670149ef9854 req-5a3084ed-1f69-40f2-85b4-0357d95d36ed service nova] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Received unexpected event network-vif-plugged-2c83f5d8-5390-4c18-a494-54cb1cee93e4 for instance with vm_state building and task_state spawning. [ 990.464359] env[61978]: DEBUG nova.compute.manager [req-6a953565-42f0-4de3-93e1-670149ef9854 req-5a3084ed-1f69-40f2-85b4-0357d95d36ed service nova] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Received event network-changed-2c83f5d8-5390-4c18-a494-54cb1cee93e4 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 990.464477] env[61978]: DEBUG nova.compute.manager [req-6a953565-42f0-4de3-93e1-670149ef9854 req-5a3084ed-1f69-40f2-85b4-0357d95d36ed service nova] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Refreshing instance network info cache due to event network-changed-2c83f5d8-5390-4c18-a494-54cb1cee93e4. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 990.464658] env[61978]: DEBUG oslo_concurrency.lockutils [req-6a953565-42f0-4de3-93e1-670149ef9854 req-5a3084ed-1f69-40f2-85b4-0357d95d36ed service nova] Acquiring lock "refresh_cache-b356fc81-f857-4416-8eb0-28c66d137967" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 990.464822] env[61978]: DEBUG oslo_concurrency.lockutils [req-6a953565-42f0-4de3-93e1-670149ef9854 req-5a3084ed-1f69-40f2-85b4-0357d95d36ed service nova] Acquired lock "refresh_cache-b356fc81-f857-4416-8eb0-28c66d137967" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.464991] env[61978]: DEBUG nova.network.neutron [req-6a953565-42f0-4de3-93e1-670149ef9854 req-5a3084ed-1f69-40f2-85b4-0357d95d36ed service nova] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Refreshing network info cache for port 2c83f5d8-5390-4c18-a494-54cb1cee93e4 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 990.478084] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 22.458s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.504372] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.512990] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1394999, 'name': CreateVM_Task, 'duration_secs': 0.448802} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.515686] env[61978]: DEBUG oslo_vmware.api [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1394996, 'name': CreateSnapshot_Task, 'duration_secs': 0.985356} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.515686] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1394994, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.604927} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.515686] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 990.515686] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Created Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 990.515686] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] f930ab49-c215-4b2e-92b1-21c0d52a70eb/f930ab49-c215-4b2e-92b1-21c0d52a70eb.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 990.515922] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 990.516137] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 990.516425] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.517821] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 990.517821] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cbc49a1-cf9a-447b-a632-caa8d97cd9c0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.520114] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b04561a4-4fdf-49d2-9ea0-c62934dfcb54 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.525451] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-feff65c4-4bba-4c9e-84d7-72d739af338f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.527734] env[61978]: DEBUG oslo_vmware.api [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1394995, 'name': PowerOnVM_Task, 'duration_secs': 0.806876} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.528908] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 990.528908] env[61978]: INFO nova.compute.manager [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Took 9.65 seconds to spawn the instance on the hypervisor. [ 990.530950] env[61978]: DEBUG nova.compute.manager [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 990.530950] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf7ab7d9-5d8e-443a-bef2-7b98679c1712 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.539863] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-325fc1c7-fab0-42a4-91b2-98dff5773e5b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.543024] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 990.543024] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]526f80fc-1218-f166-c4f4-fc4ea8d914b0" [ 990.543024] env[61978]: _type = "Task" [ 990.543024] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.548275] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 990.548275] env[61978]: value = "task-1395000" [ 990.548275] env[61978]: _type = "Task" [ 990.548275] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.564824] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9a139d-7e0a-4a1c-ba52-74d88a86757b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.577992] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]526f80fc-1218-f166-c4f4-fc4ea8d914b0, 'name': SearchDatastore_Task, 'duration_secs': 0.020066} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.579025] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395000, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.582022] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 990.582022] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 990.582022] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 990.582022] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.582723] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 
tempest-ListImageFiltersTestJSON-230309819-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 990.582723] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-906fd3a4-2331-4c03-b782-56c5e74cd8d7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.594259] env[61978]: DEBUG nova.virt.block_device [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Updating existing volume attachment record: e62e8c2d-6399-445f-8dda-9eed2edc9ce9 {{(pid=61978) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 990.598824] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 990.599055] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 990.599858] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b564cd72-6678-40e0-99ed-66757e5e23fa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.612171] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 990.612171] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52139f69-4286-a819-70a1-dd817eb99aa3" [ 990.612171] env[61978]: _type = "Task" [ 990.612171] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.629274] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52139f69-4286-a819-70a1-dd817eb99aa3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.865786] env[61978]: DEBUG nova.network.neutron [req-6a953565-42f0-4de3-93e1-670149ef9854 req-5a3084ed-1f69-40f2-85b4-0357d95d36ed service nova] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Updated VIF entry in instance network info cache for port 2c83f5d8-5390-4c18-a494-54cb1cee93e4. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 990.865999] env[61978]: DEBUG nova.network.neutron [req-6a953565-42f0-4de3-93e1-670149ef9854 req-5a3084ed-1f69-40f2-85b4-0357d95d36ed service nova] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Updating instance_info_cache with network_info: [{"id": "2c83f5d8-5390-4c18-a494-54cb1cee93e4", "address": "fa:16:3e:9a:4a:4b", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.73", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c83f5d8-53", "ovs_interfaceid": "2c83f5d8-5390-4c18-a494-54cb1cee93e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.960925] env[61978]: INFO nova.compute.manager [-] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Took 1.74 seconds to deallocate network for instance. [ 990.983470] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Acquiring lock "cde2125b-617e-4e13-8622-b81e46e15a41" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.983747] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Lock "cde2125b-617e-4e13-8622-b81e46e15a41" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.055074] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Creating linked-clone VM from snapshot {{(pid=61978) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 991.056589] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-35410b9d-0acb-4201-bec4-1805c9b2f6cd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.060383] env[61978]: DEBUG nova.compute.manager [req-d61b9ad3-eeee-4600-bcf4-ff12554aeaf8 req-29010f6f-74aa-443b-934c-766eb603402c service nova] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Received event 
network-vif-plugged-d9bef177-ead0-4f65-8781-806d6a34ea1d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 991.060653] env[61978]: DEBUG oslo_concurrency.lockutils [req-d61b9ad3-eeee-4600-bcf4-ff12554aeaf8 req-29010f6f-74aa-443b-934c-766eb603402c service nova] Acquiring lock "cb004a19-0048-4766-af7c-0fbde867f422-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.060878] env[61978]: DEBUG oslo_concurrency.lockutils [req-d61b9ad3-eeee-4600-bcf4-ff12554aeaf8 req-29010f6f-74aa-443b-934c-766eb603402c service nova] Lock "cb004a19-0048-4766-af7c-0fbde867f422-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.061073] env[61978]: DEBUG oslo_concurrency.lockutils [req-d61b9ad3-eeee-4600-bcf4-ff12554aeaf8 req-29010f6f-74aa-443b-934c-766eb603402c service nova] Lock "cb004a19-0048-4766-af7c-0fbde867f422-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.061248] env[61978]: DEBUG nova.compute.manager [req-d61b9ad3-eeee-4600-bcf4-ff12554aeaf8 req-29010f6f-74aa-443b-934c-766eb603402c service nova] [instance: cb004a19-0048-4766-af7c-0fbde867f422] No waiting events found dispatching network-vif-plugged-d9bef177-ead0-4f65-8781-806d6a34ea1d {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 991.061413] env[61978]: WARNING nova.compute.manager [req-d61b9ad3-eeee-4600-bcf4-ff12554aeaf8 req-29010f6f-74aa-443b-934c-766eb603402c service nova] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Received unexpected event network-vif-plugged-d9bef177-ead0-4f65-8781-806d6a34ea1d for instance with vm_state building and task_state spawning. [ 991.077734] env[61978]: INFO nova.compute.manager [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Took 38.12 seconds to build instance. [ 991.081106] env[61978]: DEBUG oslo_vmware.api [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 991.081106] env[61978]: value = "task-1395004" [ 991.081106] env[61978]: _type = "Task" [ 991.081106] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.086729] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395000, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087433} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.092625] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 991.094148] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae0bfcf-bc98-4906-bce6-b2516141dd64 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.105154] env[61978]: DEBUG oslo_vmware.api [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395004, 'name': CloneVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.125548] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] f930ab49-c215-4b2e-92b1-21c0d52a70eb/f930ab49-c215-4b2e-92b1-21c0d52a70eb.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 991.130855] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9eb2c9e6-115a-44b6-a486-3c2b3c8a1fbe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.153932] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52139f69-4286-a819-70a1-dd817eb99aa3, 'name': SearchDatastore_Task, 'duration_secs': 0.025271} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.156209] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 991.156209] env[61978]: value = "task-1395005" [ 991.156209] env[61978]: _type = "Task" [ 991.156209] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.156457] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-691ae551-7607-4519-a2e3-8b390d7a6cbc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.163543] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquiring lock "96bef3f3-a45c-43ba-a86a-66c1d5686ea6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.163842] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "96bef3f3-a45c-43ba-a86a-66c1d5686ea6" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.164032] env[61978]: INFO nova.compute.manager [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Attaching volume fa93f8e6-42e7-4249-a8fe-24b527dc71b5 to /dev/sdb [ 991.170726] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395005, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.171110] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 991.171110] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ca0052-44bd-d4d4-9768-41a1fd7d7538" [ 991.171110] env[61978]: _type = "Task" [ 991.171110] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.183808] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ca0052-44bd-d4d4-9768-41a1fd7d7538, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.219551] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e6bdfd-46f7-4031-a629-ee2b50c9a8a9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.233593] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad18a7e-a5d4-4e33-aa1e-823bd6cc5601 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.249138] env[61978]: DEBUG nova.virt.block_device [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Updating existing volume attachment record: 0602f2e5-5b64-41ef-a4ca-ed6f78a4b4d9 {{(pid=61978) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 991.289861] env[61978]: DEBUG nova.network.neutron [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Successfully updated port: d9bef177-ead0-4f65-8781-806d6a34ea1d {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 991.378574] env[61978]: INFO nova.compute.manager [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Rebuilding instance [ 991.381682] env[61978]: DEBUG oslo_concurrency.lockutils [req-6a953565-42f0-4de3-93e1-670149ef9854 req-5a3084ed-1f69-40f2-85b4-0357d95d36ed service nova] Releasing lock "refresh_cache-b356fc81-f857-4416-8eb0-28c66d137967" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.451070] env[61978]: DEBUG nova.compute.manager [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 991.452055] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c498dd-7d00-4c0a-8f78-7e673556c972 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.470121] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.486966] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Lock "cde2125b-617e-4e13-8622-b81e46e15a41" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.503s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.487622] env[61978]: 
DEBUG nova.compute.manager [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 991.540402] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 92eb5edb-803b-48d4-8c4f-338d7c3b3d13 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.540599] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 243e7146-46fc-43f4-a83b-cdc58f397f9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.540677] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 85fc5af8-454d-4042-841a-945b7e84eb6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.540799] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance eb7cb200-c162-4e92-8916-6d9abd5cf34d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.540947] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 371ddf66-a39b-41c4-bbd1-2a1c1b99834e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 991.541077] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 96bef3f3-a45c-43ba-a86a-66c1d5686ea6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.541215] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance b26a4784-698d-477a-8db7-58156899d231 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.541409] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 50788030-4dc2-4215-bf2c-acba5dd33ce4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.541409] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance c17c986e-c008-4414-8dd1-4ea836458048 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.541557] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance ff793464-9bef-449f-8485-36d3b8fb1d69 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 991.541685] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance e30d4a9f-1d75-453c-9552-2a0fbd4aa87d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 991.541818] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 32bcb974-8db9-43e2-b397-b497f3a4f30c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 991.541926] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance f3c837fb-be7e-40a6-aae4-7f213c62ab2c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.542079] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance f22e097d-f1a5-414a-82cc-ab455db876c7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 991.542202] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.542331] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance dd686727-fc33-4dc4-b386-aabec27cf215 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.542441] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.542550] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance ea1c2d74-70b4-4547-a887-78e291c3082a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.542658] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 8a21e6a7-c34e-4af0-b1fd-8a501694614c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.542768] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.542878] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.542986] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance bdfdd685-e440-4f53-b6c4-2ee2f06acba8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.543109] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance f1001633-e4e5-4de1-8a6b-cf653e43d821 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.543220] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.543328] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance f930ab49-c215-4b2e-92b1-21c0d52a70eb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.543435] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance b356fc81-f857-4416-8eb0-28c66d137967 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.543540] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance cb004a19-0048-4766-af7c-0fbde867f422 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.543647] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 2c1ce021-255f-454d-ba0e-c85380f3e973 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.582249] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4b44980-8931-4f3a-a400-c2ba99449b85 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.927s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.604306] env[61978]: DEBUG oslo_vmware.api [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395004, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.673043] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395005, 'name': ReconfigVM_Task, 'duration_secs': 0.502033} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.673288] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Reconfigured VM instance instance-0000002b to attach disk [datastore2] f930ab49-c215-4b2e-92b1-21c0d52a70eb/f930ab49-c215-4b2e-92b1-21c0d52a70eb.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 991.674444] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3afc2aad-2c6e-43f9-af11-0e1f7283ed49 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.690876] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ca0052-44bd-d4d4-9768-41a1fd7d7538, 'name': SearchDatastore_Task, 'duration_secs': 0.016332} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.692637] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.692932] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] b356fc81-f857-4416-8eb0-28c66d137967/b356fc81-f857-4416-8eb0-28c66d137967.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 991.693373] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 991.693373] env[61978]: value = "task-1395007" [ 991.693373] env[61978]: _type = "Task" [ 991.693373] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.693586] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f9e2b83-f73a-411d-86b1-394444164b59 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.712116] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395007, 'name': Rename_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.713197] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 991.713197] env[61978]: value = "task-1395009" [ 991.713197] env[61978]: _type = "Task" [ 991.713197] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.725583] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395009, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.792346] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquiring lock "refresh_cache-cb004a19-0048-4766-af7c-0fbde867f422" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.792561] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquired lock "refresh_cache-cb004a19-0048-4766-af7c-0fbde867f422" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.792747] env[61978]: DEBUG nova.network.neutron [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 991.964394] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 991.966109] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-97fb1bad-9b68-42b6-b65e-05706c940bb0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.973281] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 991.973281] env[61978]: value = "task-1395011" [ 991.973281] env[61978]: _type = "Task" [ 991.973281] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.984908] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395011, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.995483] env[61978]: DEBUG nova.compute.utils [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 991.996454] env[61978]: DEBUG nova.compute.manager [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 991.996583] env[61978]: DEBUG nova.network.neutron [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 992.047245] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 8f609401-af09-4291-a1e7-a356fbc4aac9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 992.085308] env[61978]: DEBUG nova.compute.manager [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 992.102237] env[61978]: DEBUG oslo_vmware.api [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395004, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.210140] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395007, 'name': Rename_Task, 'duration_secs': 0.220212} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.210587] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 992.210867] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c59bb198-f39a-44d6-af05-a1e749de3bd7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.221848] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 992.221848] env[61978]: value = "task-1395012" [ 992.221848] env[61978]: _type = "Task" [ 992.221848] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.230145] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395009, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.237532] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395012, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.248199] env[61978]: DEBUG nova.policy [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ae972d378696499cac6d36b3cae5d872', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3ae4a07d37474c3095bc5fb5a7e16bd4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 992.349851] env[61978]: DEBUG nova.network.neutron [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 992.488623] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395011, 'name': PowerOffVM_Task, 'duration_secs': 0.445864} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.492223] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 992.492616] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 992.493933] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a41acc60-4141-49e2-8b01-8e1eb0b37dc7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.499624] env[61978]: DEBUG nova.compute.manager [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 992.510293] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 992.511110] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9f368209-5ad2-4a30-bdc9-1f8defa4037a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.552154] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 992.612642] env[61978]: DEBUG oslo_vmware.api [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395004, 'name': CloneVM_Task} progress is 95%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.615409] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 992.615409] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 992.615409] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Deleting the datastore file [datastore1] 92eb5edb-803b-48d4-8c4f-338d7c3b3d13 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 992.615409] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc5f94f8-83b9-4561-8643-6560f93f60b3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.624839] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 992.624839] env[61978]: value = "task-1395014" [ 992.624839] env[61978]: _type = "Task" [ 992.624839] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.639372] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395014, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.642942] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.659768] env[61978]: DEBUG nova.network.neutron [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Updating instance_info_cache with network_info: [{"id": "d9bef177-ead0-4f65-8781-806d6a34ea1d", "address": "fa:16:3e:76:5d:87", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.112", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9bef177-ea", "ovs_interfaceid": "d9bef177-ead0-4f65-8781-806d6a34ea1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.734244] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395012, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.739909] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395009, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.629524} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.740798] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] b356fc81-f857-4416-8eb0-28c66d137967/b356fc81-f857-4416-8eb0-28c66d137967.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 992.740798] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 992.741205] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-107ccd04-0cf7-42b5-a74c-8a65ed845db9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.752642] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 992.752642] env[61978]: value = "task-1395015" [ 992.752642] env[61978]: _type = "Task" [ 992.752642] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.767056] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395015, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.804855] env[61978]: DEBUG nova.compute.manager [req-f59a0ef0-f5ba-4e6d-81af-7fc6a281e343 req-7a5ee9dd-80c6-4161-8799-631ff7f91a6f service nova] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Received event network-vif-deleted-35d1b15c-e867-4d4b-8d09-58369a8e74e1 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 992.964888] env[61978]: DEBUG nova.network.neutron [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Successfully created port: 6cc22552-945d-43cc-be37-f57b7f56d3b2 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 993.054646] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 4c7053ee-7c44-49ee-8d30-bf14686c6b1c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 993.102022] env[61978]: DEBUG oslo_vmware.api [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395004, 'name': CloneVM_Task, 'duration_secs': 1.663279} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.102382] env[61978]: INFO nova.virt.vmwareapi.vmops [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Created linked-clone VM from snapshot [ 993.103308] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f32e39c-a79d-4767-9d1e-7b33d170f037 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.114244] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Uploading image 81a4ea36-3cda-42da-b32e-eca1d059e24b {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 993.138883] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395014, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.248793} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.142149] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 993.142520] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 993.142912] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 993.153554] env[61978]: DEBUG oslo_vmware.rw_handles [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 993.153554] env[61978]: value = "vm-295892" [ 993.153554] env[61978]: _type = "VirtualMachine" [ 993.153554] env[61978]: }. 
{{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 993.153923] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-886ca241-94a4-4601-999f-a3c59572a285 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.163289] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Releasing lock "refresh_cache-cb004a19-0048-4766-af7c-0fbde867f422" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.163750] env[61978]: DEBUG nova.compute.manager [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Instance network_info: |[{"id": "d9bef177-ead0-4f65-8781-806d6a34ea1d", "address": "fa:16:3e:76:5d:87", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.112", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9bef177-ea", "ovs_interfaceid": "d9bef177-ead0-4f65-8781-806d6a34ea1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 993.164184] env[61978]: DEBUG oslo_vmware.rw_handles [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lease: (returnval){ [ 993.164184] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]526305d7-b657-1273-ef50-a4a9c8dc7de1" [ 993.164184] env[61978]: _type = "HttpNfcLease" [ 993.164184] env[61978]: } obtained for exporting VM: (result){ [ 993.164184] env[61978]: value = "vm-295892" [ 993.164184] env[61978]: _type = "VirtualMachine" [ 993.164184] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 993.165263] env[61978]: DEBUG oslo_vmware.api [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the lease: (returnval){ [ 993.165263] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]526305d7-b657-1273-ef50-a4a9c8dc7de1" [ 993.165263] env[61978]: _type = "HttpNfcLease" [ 993.165263] env[61978]: } to be ready. 
{{(pid=61978) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 993.165263] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:5d:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27138a4c-60c9-45fb-bf37-4c2f765315a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd9bef177-ead0-4f65-8781-806d6a34ea1d', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 993.173808] env[61978]: DEBUG oslo.service.loopingcall [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 993.173942] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 993.180279] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f6db6cba-b281-4dbb-9830-e91923130eb4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.204022] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 993.204022] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]526305d7-b657-1273-ef50-a4a9c8dc7de1" [ 993.204022] env[61978]: _type = "HttpNfcLease" [ 993.204022] env[61978]: } is ready. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 993.204022] env[61978]: DEBUG oslo_vmware.rw_handles [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 993.204022] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]526305d7-b657-1273-ef50-a4a9c8dc7de1" [ 993.204022] env[61978]: _type = "HttpNfcLease" [ 993.204022] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 993.204529] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b1443e-a495-4a6c-ad8a-6eaad6c05f83 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.213311] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 993.213311] env[61978]: value = "task-1395018" [ 993.213311] env[61978]: _type = "Task" [ 993.213311] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.220169] env[61978]: DEBUG oslo_vmware.rw_handles [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f402bf-2e44-b898-316f-388237347862/disk-0.vmdk from lease info. 
{{(pid=61978) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 993.220669] env[61978]: DEBUG oslo_vmware.rw_handles [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f402bf-2e44-b898-316f-388237347862/disk-0.vmdk for reading. {{(pid=61978) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 993.302131] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395018, 'name': CreateVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.314712] env[61978]: DEBUG oslo_vmware.api [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395012, 'name': PowerOnVM_Task, 'duration_secs': 1.009382} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.315227] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395015, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079127} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.315428] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 993.315783] env[61978]: INFO nova.compute.manager [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Took 9.68 seconds to spawn the instance on the hypervisor. 
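The records above trace the image-export path for the linked-clone snapshot: vmops requests an HttpNfcLease via ExportVm, oslo.vmware polls the lease until it is ready, and rw_handles reads the lease info to find the disk-0.vmdk URL it then opens for the stream-optimized upload. A minimal sketch of that handshake using oslo.vmware's session helpers follows; it assumes an already-created VMwareAPISession named `session` and a VirtualMachine moref `vm_ref` (vm-295892 in the log), neither of which is constructed here.

    from oslo_vmware import vim_util

    # Request an export lease for the VM, as in the
    # "Invoking VirtualMachine.ExportVm" record above.
    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)

    # Equivalent to the "Waiting for the lease ... to be ready" records:
    # blocks until the HttpNfcLease reaches the ready state, raising if
    # the lease errors out instead.
    session.wait_for_lease_ready(lease)

    # Reading HttpNfcLease.info yields the device URL list; the disk-0.vmdk
    # URL reported as "Found VMDK URL" in the log comes from entries like
    # these.
    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    vmdk_urls = [device_url.url for device_url in lease_info.deviceUrl]

The driver then opens that URL for reading and streams the disk contents to Glance, which is what the "Opening URL ... for reading" and "Uploading image 81a4ea36-..." records above show.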
[ 993.316081] env[61978]: DEBUG nova.compute.manager [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 993.316415] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 993.317303] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50fdbe6c-a154-4848-9135-4a18dfacc2be {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.320699] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e6dd698-11a3-4c37-830b-8634dcc492da {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.357620] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] b356fc81-f857-4416-8eb0-28c66d137967/b356fc81-f857-4416-8eb0-28c66d137967.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 993.358564] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec5097a2-5d4c-4fb6-a041-772d3a77d1f0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.377316] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2140da7e-9a3b-4c85-8651-5f437da54991 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.388025] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 993.388025] env[61978]: value = "task-1395019" [ 993.388025] env[61978]: _type = "Task" [ 993.388025] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.398044] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395019, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.428539] env[61978]: DEBUG nova.compute.manager [req-fcae967f-4b21-44ec-aa59-747ffe3a126c req-94fedde5-c0cc-4c71-a5b6-d1c88b066b70 service nova] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Received event network-changed-d9bef177-ead0-4f65-8781-806d6a34ea1d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 993.428737] env[61978]: DEBUG nova.compute.manager [req-fcae967f-4b21-44ec-aa59-747ffe3a126c req-94fedde5-c0cc-4c71-a5b6-d1c88b066b70 service nova] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Refreshing instance network info cache due to event network-changed-d9bef177-ead0-4f65-8781-806d6a34ea1d. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 993.428959] env[61978]: DEBUG oslo_concurrency.lockutils [req-fcae967f-4b21-44ec-aa59-747ffe3a126c req-94fedde5-c0cc-4c71-a5b6-d1c88b066b70 service nova] Acquiring lock "refresh_cache-cb004a19-0048-4766-af7c-0fbde867f422" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.429132] env[61978]: DEBUG oslo_concurrency.lockutils [req-fcae967f-4b21-44ec-aa59-747ffe3a126c req-94fedde5-c0cc-4c71-a5b6-d1c88b066b70 service nova] Acquired lock "refresh_cache-cb004a19-0048-4766-af7c-0fbde867f422" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.429291] env[61978]: DEBUG nova.network.neutron [req-fcae967f-4b21-44ec-aa59-747ffe3a126c req-94fedde5-c0cc-4c71-a5b6-d1c88b066b70 service nova] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Refreshing network info cache for port d9bef177-ead0-4f65-8781-806d6a34ea1d {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 993.519566] env[61978]: DEBUG nova.compute.manager [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 993.558029] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 0d48ae5d-7cc8-42b3-a993-44636e9cb171 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 993.621617] env[61978]: DEBUG nova.virt.hardware [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 993.622352] env[61978]: DEBUG nova.virt.hardware [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 993.622505] env[61978]: DEBUG nova.virt.hardware [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 993.624568] env[61978]: DEBUG nova.virt.hardware [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 993.624568] env[61978]: DEBUG nova.virt.hardware [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 993.624568] env[61978]: DEBUG nova.virt.hardware [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 993.624568] env[61978]: DEBUG nova.virt.hardware [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 993.624568] env[61978]: DEBUG nova.virt.hardware [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 993.625018] env[61978]: DEBUG nova.virt.hardware [None 
req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 993.625018] env[61978]: DEBUG nova.virt.hardware [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 993.625018] env[61978]: DEBUG nova.virt.hardware [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 993.625837] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-695c8eef-19b7-4daf-b87a-8014ab4e5a1a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.636855] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb8a1f7a-5ce4-444a-9fd4-5f2c1b5f788e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.729762] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395018, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.833351] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquiring lock "5d9556d2-fcdd-416f-8f16-0fb271ff4ca5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.833770] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lock "5d9556d2-fcdd-416f-8f16-0fb271ff4ca5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.868051] env[61978]: INFO nova.compute.manager [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Took 36.57 seconds to build instance. [ 993.900068] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395019, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.062855] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 7e71c8de-1f94-4161-8ad8-a67792c5ce24 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 994.226672] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395018, 'name': CreateVM_Task, 'duration_secs': 0.551363} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.227064] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 994.228217] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.228363] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.228739] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 994.231620] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c5df0da-bb9f-4ccc-90bc-4f0fe4d93fb2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.240024] env[61978]: DEBUG nova.network.neutron [req-fcae967f-4b21-44ec-aa59-747ffe3a126c req-94fedde5-c0cc-4c71-a5b6-d1c88b066b70 service nova] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Updated VIF entry in instance network info cache for port d9bef177-ead0-4f65-8781-806d6a34ea1d. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 994.241420] env[61978]: DEBUG nova.network.neutron [req-fcae967f-4b21-44ec-aa59-747ffe3a126c req-94fedde5-c0cc-4c71-a5b6-d1c88b066b70 service nova] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Updating instance_info_cache with network_info: [{"id": "d9bef177-ead0-4f65-8781-806d6a34ea1d", "address": "fa:16:3e:76:5d:87", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.112", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9bef177-ea", "ovs_interfaceid": "d9bef177-ead0-4f65-8781-806d6a34ea1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.244542] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 994.244542] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c6d8f8-6613-8b78-3b18-1e65da7c69ba" [ 994.244542] env[61978]: _type = "Task" [ 994.244542] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.259609] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c6d8f8-6613-8b78-3b18-1e65da7c69ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.368902] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8b07803-ff44-4df2-9c20-24b692a5383d tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "f930ab49-c215-4b2e-92b1-21c0d52a70eb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.930s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.398239] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395019, 'name': ReconfigVM_Task, 'duration_secs': 0.574889} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.398565] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Reconfigured VM instance instance-0000002c to attach disk [datastore2] b356fc81-f857-4416-8eb0-28c66d137967/b356fc81-f857-4416-8eb0-28c66d137967.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 994.399414] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-51e60c69-a417-423b-b7cd-ee2846a4a71c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.410037] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 994.410037] env[61978]: value = "task-1395021" [ 994.410037] env[61978]: _type = "Task" [ 994.410037] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.421094] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395021, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.566851] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 994.745032] env[61978]: DEBUG oslo_concurrency.lockutils [req-fcae967f-4b21-44ec-aa59-747ffe3a126c req-94fedde5-c0cc-4c71-a5b6-d1c88b066b70 service nova] Releasing lock "refresh_cache-cb004a19-0048-4766-af7c-0fbde867f422" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.745397] env[61978]: DEBUG nova.compute.manager [req-fcae967f-4b21-44ec-aa59-747ffe3a126c req-94fedde5-c0cc-4c71-a5b6-d1c88b066b70 service nova] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Received event network-changed-631d15db-8176-407e-8ab9-1b7e5a095d9a {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 994.745803] env[61978]: DEBUG nova.compute.manager [req-fcae967f-4b21-44ec-aa59-747ffe3a126c req-94fedde5-c0cc-4c71-a5b6-d1c88b066b70 service nova] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Refreshing instance network info cache due to event network-changed-631d15db-8176-407e-8ab9-1b7e5a095d9a. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 994.746357] env[61978]: DEBUG oslo_concurrency.lockutils [req-fcae967f-4b21-44ec-aa59-747ffe3a126c req-94fedde5-c0cc-4c71-a5b6-d1c88b066b70 service nova] Acquiring lock "refresh_cache-f1001633-e4e5-4de1-8a6b-cf653e43d821" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.746519] env[61978]: DEBUG oslo_concurrency.lockutils [req-fcae967f-4b21-44ec-aa59-747ffe3a126c req-94fedde5-c0cc-4c71-a5b6-d1c88b066b70 service nova] Acquired lock "refresh_cache-f1001633-e4e5-4de1-8a6b-cf653e43d821" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.746691] env[61978]: DEBUG nova.network.neutron [req-fcae967f-4b21-44ec-aa59-747ffe3a126c req-94fedde5-c0cc-4c71-a5b6-d1c88b066b70 service nova] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Refreshing network info cache for port 631d15db-8176-407e-8ab9-1b7e5a095d9a {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 994.765912] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c6d8f8-6613-8b78-3b18-1e65da7c69ba, 'name': SearchDatastore_Task, 'duration_secs': 0.022855} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.766200] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.766521] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 994.766954] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.767179] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.767380] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 994.767934] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7e28f8c-2be8-4132-a0b1-c16df8cc029e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.778750] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 994.778960] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 994.779700] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5d85191-fa0b-487b-a211-54af333b64d0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.786680] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 994.786680] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f8f0e5-5d43-4056-f64e-fc1fcdb86a41" [ 994.786680] env[61978]: _type = "Task" [ 994.786680] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.796943] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f8f0e5-5d43-4056-f64e-fc1fcdb86a41, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.864653] env[61978]: DEBUG nova.network.neutron [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Successfully updated port: 6cc22552-945d-43cc-be37-f57b7f56d3b2 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 994.872781] env[61978]: DEBUG nova.compute.manager [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 994.877428] env[61978]: DEBUG nova.compute.manager [req-6c7517e6-8c9c-404c-9355-db639cd4fa6e req-6310efd8-977b-49a2-8f63-6520deeff539 service nova] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Received event network-changed-7417d7e9-723d-408d-bfa4-e583af757e79 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 994.877845] env[61978]: DEBUG nova.compute.manager [req-6c7517e6-8c9c-404c-9355-db639cd4fa6e req-6310efd8-977b-49a2-8f63-6520deeff539 service nova] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Refreshing instance network info cache due to event network-changed-7417d7e9-723d-408d-bfa4-e583af757e79. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 994.878231] env[61978]: DEBUG oslo_concurrency.lockutils [req-6c7517e6-8c9c-404c-9355-db639cd4fa6e req-6310efd8-977b-49a2-8f63-6520deeff539 service nova] Acquiring lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.878540] env[61978]: DEBUG oslo_concurrency.lockutils [req-6c7517e6-8c9c-404c-9355-db639cd4fa6e req-6310efd8-977b-49a2-8f63-6520deeff539 service nova] Acquired lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.878840] env[61978]: DEBUG nova.network.neutron [req-6c7517e6-8c9c-404c-9355-db639cd4fa6e req-6310efd8-977b-49a2-8f63-6520deeff539 service nova] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Refreshing network info cache for port 7417d7e9-723d-408d-bfa4-e583af757e79 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 994.922958] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395021, 'name': Rename_Task, 'duration_secs': 0.252359} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.923553] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 994.923924] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-106ea54e-af5f-4be3-bc02-031914d4688a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.934376] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 994.934376] env[61978]: value = "task-1395022" [ 994.934376] env[61978]: _type = "Task" [ 994.934376] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.945267] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395022, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.070526] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance adf25af8-28c4-444e-b849-88d643f57dcf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 995.070921] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 23 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 995.071134] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=5056MB phys_disk=200GB used_disk=23GB total_vcpus=48 used_vcpus=23 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 995.300183] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f8f0e5-5d43-4056-f64e-fc1fcdb86a41, 'name': SearchDatastore_Task, 'duration_secs': 0.022458} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.304584] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d6d6ce7-fdf8-44db-8f1a-a4a0480ddf5d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.313957] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 995.313957] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5243810f-899c-1794-d203-3151a03c4744" [ 995.313957] env[61978]: _type = "Task" [ 995.313957] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.329351] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5243810f-899c-1794-d203-3151a03c4744, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.361547] env[61978]: DEBUG nova.compute.manager [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Stashing vm_state: active {{(pid=61978) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 995.367671] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Acquiring lock "refresh_cache-2c1ce021-255f-454d-ba0e-c85380f3e973" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.367987] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Acquired lock "refresh_cache-2c1ce021-255f-454d-ba0e-c85380f3e973" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.368159] env[61978]: DEBUG nova.network.neutron [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 995.407336] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.449812] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395022, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.664150] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4101fb35-eb9f-43b2-aeaf-216718bd608f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.675924] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0dbe7a-cdc5-4d14-bfbc-9321b969513e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.681096] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Volume attach. 
Driver type: vmdk {{(pid=61978) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 995.681548] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295891', 'volume_id': '229fb92f-4196-4c02-99cd-5cfa5c710c35', 'name': 'volume-229fb92f-4196-4c02-99cd-5cfa5c710c35', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f3c837fb-be7e-40a6-aae4-7f213c62ab2c', 'attached_at': '', 'detached_at': '', 'volume_id': '229fb92f-4196-4c02-99cd-5cfa5c710c35', 'serial': '229fb92f-4196-4c02-99cd-5cfa5c710c35'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 995.682217] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9936379-bfd4-4bc5-8ced-da2ae6ba9598 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.685947] env[61978]: DEBUG nova.network.neutron [req-6c7517e6-8c9c-404c-9355-db639cd4fa6e req-6310efd8-977b-49a2-8f63-6520deeff539 service nova] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Updated VIF entry in instance network info cache for port 7417d7e9-723d-408d-bfa4-e583af757e79. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 995.686388] env[61978]: DEBUG nova.network.neutron [req-6c7517e6-8c9c-404c-9355-db639cd4fa6e req-6310efd8-977b-49a2-8f63-6520deeff539 service nova] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Updating instance_info_cache with network_info: [{"id": "7417d7e9-723d-408d-bfa4-e583af757e79", "address": "fa:16:3e:e6:e3:c6", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7417d7e9-72", "ovs_interfaceid": "7417d7e9-723d-408d-bfa4-e583af757e79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.688894] env[61978]: DEBUG nova.network.neutron [req-fcae967f-4b21-44ec-aa59-747ffe3a126c req-94fedde5-c0cc-4c71-a5b6-d1c88b066b70 service nova] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Updated VIF entry in instance network info cache for port 631d15db-8176-407e-8ab9-1b7e5a095d9a. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 995.689478] env[61978]: DEBUG nova.network.neutron [req-fcae967f-4b21-44ec-aa59-747ffe3a126c req-94fedde5-c0cc-4c71-a5b6-d1c88b066b70 service nova] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Updating instance_info_cache with network_info: [{"id": "631d15db-8176-407e-8ab9-1b7e5a095d9a", "address": "fa:16:3e:e3:c0:f1", "network": {"id": "6ca2a9c9-ad13-4e1f-9a03-8a2d2f942025", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-354897565-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.174", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4ea5d0199a04f959e59a8e134687392", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap631d15db-81", "ovs_interfaceid": "631d15db-8176-407e-8ab9-1b7e5a095d9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.733425] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea1416bf-edd4-460c-81b7-5998bb5039b0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.737545] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03227215-2435-4736-b16e-1fa791f8bb8c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.763553] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe9a815b-69d3-4225-a0b8-60fefa340554 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.779828] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] volume-229fb92f-4196-4c02-99cd-5cfa5c710c35/volume-229fb92f-4196-4c02-99cd-5cfa5c710c35.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 995.779828] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95b3a72c-53c8-4651-9b3b-c44ab8fb597d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.805910] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.810538] env[61978]: DEBUG oslo_vmware.api [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 995.810538] env[61978]: value = "task-1395023" [ 995.810538] env[61978]: _type = "Task" [ 995.810538] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.828127] env[61978]: DEBUG oslo_vmware.api [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395023, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.832678] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5243810f-899c-1794-d203-3151a03c4744, 'name': SearchDatastore_Task, 'duration_secs': 0.016613} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.832980] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 995.833487] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] cb004a19-0048-4766-af7c-0fbde867f422/cb004a19-0048-4766-af7c-0fbde867f422.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 995.833811] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-71a3352e-04bd-4ae2-b263-1a5134d89c0f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.844747] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 995.844747] env[61978]: value = "task-1395024" [ 995.844747] env[61978]: _type = "Task" [ 995.844747] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.855563] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395024, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.884106] env[61978]: DEBUG oslo_concurrency.lockutils [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.919184] env[61978]: DEBUG nova.network.neutron [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 995.933682] env[61978]: DEBUG nova.compute.manager [req-39a0c769-35a6-44f2-9572-c6100b9526b4 req-4ebd1e13-e679-4341-9bec-123b11e3c239 service nova] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Received event network-vif-plugged-6cc22552-945d-43cc-be37-f57b7f56d3b2 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 995.933953] env[61978]: DEBUG oslo_concurrency.lockutils [req-39a0c769-35a6-44f2-9572-c6100b9526b4 req-4ebd1e13-e679-4341-9bec-123b11e3c239 service nova] Acquiring lock "2c1ce021-255f-454d-ba0e-c85380f3e973-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.934369] env[61978]: DEBUG oslo_concurrency.lockutils [req-39a0c769-35a6-44f2-9572-c6100b9526b4 req-4ebd1e13-e679-4341-9bec-123b11e3c239 service nova] Lock "2c1ce021-255f-454d-ba0e-c85380f3e973-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.934635] env[61978]: DEBUG oslo_concurrency.lockutils [req-39a0c769-35a6-44f2-9572-c6100b9526b4 req-4ebd1e13-e679-4341-9bec-123b11e3c239 service nova] Lock "2c1ce021-255f-454d-ba0e-c85380f3e973-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.934861] env[61978]: DEBUG nova.compute.manager [req-39a0c769-35a6-44f2-9572-c6100b9526b4 req-4ebd1e13-e679-4341-9bec-123b11e3c239 service nova] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] No waiting events found dispatching network-vif-plugged-6cc22552-945d-43cc-be37-f57b7f56d3b2 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 995.935148] env[61978]: WARNING nova.compute.manager [req-39a0c769-35a6-44f2-9572-c6100b9526b4 req-4ebd1e13-e679-4341-9bec-123b11e3c239 service nova] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Received unexpected event network-vif-plugged-6cc22552-945d-43cc-be37-f57b7f56d3b2 for instance with vm_state building and task_state spawning. 
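Annotation: the entries just above show the compute manager serializing external "network-vif-plugged" events behind a per-instance "<uuid>-events" lock before dispatching them, and logging a warning when no waiter is registered. A minimal sketch of that oslo.concurrency pattern follows; the registry and function name are illustrative stand-ins, not Nova's actual code.

    from collections import defaultdict
    from oslo_concurrency import lockutils

    # Illustrative registry of events that building instances are waiting on.
    _waiting_events = defaultdict(dict)

    def pop_instance_event(instance_uuid, event_name):
        """Dispatch an external event behind the per-instance "-events" lock,
        as in the trace above. Returns the registered waiter, or None, which
        is the case that produces the "Received unexpected event" warning."""
        with lockutils.lock(f"{instance_uuid}-events"):
            return _waiting_events[instance_uuid].pop(event_name, None)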
[ 995.935404] env[61978]: DEBUG nova.compute.manager [req-39a0c769-35a6-44f2-9572-c6100b9526b4 req-4ebd1e13-e679-4341-9bec-123b11e3c239 service nova] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Received event network-changed-6cc22552-945d-43cc-be37-f57b7f56d3b2 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 995.935589] env[61978]: DEBUG nova.compute.manager [req-39a0c769-35a6-44f2-9572-c6100b9526b4 req-4ebd1e13-e679-4341-9bec-123b11e3c239 service nova] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Refreshing instance network info cache due to event network-changed-6cc22552-945d-43cc-be37-f57b7f56d3b2. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 995.935821] env[61978]: DEBUG oslo_concurrency.lockutils [req-39a0c769-35a6-44f2-9572-c6100b9526b4 req-4ebd1e13-e679-4341-9bec-123b11e3c239 service nova] Acquiring lock "refresh_cache-2c1ce021-255f-454d-ba0e-c85380f3e973" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.951851] env[61978]: DEBUG oslo_vmware.api [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395022, 'name': PowerOnVM_Task, 'duration_secs': 0.911536} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.952589] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 995.952589] env[61978]: INFO nova.compute.manager [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Took 9.37 seconds to spawn the instance on the hypervisor. 
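Annotation: the ReconfigVM_Task, CopyVirtualDisk_Task and PowerOnVM_Task entries around this point all follow the same wait_for_task pattern: submit the vCenter call, then poll the returned task until it reports success or error (the "progress is N%" and "completed successfully ... duration_secs" lines). A rough sketch of that polling loop, assuming a caller-supplied get_task_info callable standing in for the PropertyCollector reads oslo.vmware performs:

    import time

    POLL_INTERVAL = 0.5  # seconds between task polls

    def wait_for_task(get_task_info, task_ref, timeout=300):
        """Poll a vCenter task until it finishes, mirroring the
        "Task: {...} progress is N%" entries above.

        get_task_info is an assumed callable returning an object with
        .state ('running' | 'success' | 'error'), .progress and .error."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_ref)
            if info.state == "success":
                return info
            if info.state == "error":
                raise RuntimeError(f"Task {task_ref} failed: {info.error}")
            time.sleep(POLL_INTERVAL)
        raise TimeoutError(f"Task {task_ref} did not complete in {timeout}s")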
[ 995.953213] env[61978]: DEBUG nova.compute.manager [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 995.954139] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-008fedb7-49de-45f1-af88-bb41f8ff5931 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.119362] env[61978]: DEBUG nova.network.neutron [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Updating instance_info_cache with network_info: [{"id": "6cc22552-945d-43cc-be37-f57b7f56d3b2", "address": "fa:16:3e:98:0a:2b", "network": {"id": "c9fe3a73-eb7c-4b01-93ed-10d286a49d32", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-230759624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ae4a07d37474c3095bc5fb5a7e16bd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cc22552-94", "ovs_interfaceid": "6cc22552-945d-43cc-be37-f57b7f56d3b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.189637] env[61978]: DEBUG oslo_concurrency.lockutils [req-6c7517e6-8c9c-404c-9355-db639cd4fa6e req-6310efd8-977b-49a2-8f63-6520deeff539 service nova] Releasing lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.191555] env[61978]: DEBUG oslo_concurrency.lockutils [req-fcae967f-4b21-44ec-aa59-747ffe3a126c req-94fedde5-c0cc-4c71-a5b6-d1c88b066b70 service nova] Releasing lock "refresh_cache-f1001633-e4e5-4de1-8a6b-cf653e43d821" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.312693] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 996.321221] env[61978]: DEBUG 
nova.virt.vmwareapi.volumeops [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Volume attach. Driver type: vmdk {{(pid=61978) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 996.321518] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295894', 'volume_id': 'fa93f8e6-42e7-4249-a8fe-24b527dc71b5', 'name': 'volume-fa93f8e6-42e7-4249-a8fe-24b527dc71b5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '96bef3f3-a45c-43ba-a86a-66c1d5686ea6', 'attached_at': '', 'detached_at': '', 'volume_id': 'fa93f8e6-42e7-4249-a8fe-24b527dc71b5', 'serial': 'fa93f8e6-42e7-4249-a8fe-24b527dc71b5'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 996.322701] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c417e50-6fe8-44dd-81b1-ccc11f0afff9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.348018] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cbc494c-7081-467c-9d55-a062001907b0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.351169] env[61978]: DEBUG oslo_vmware.api [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395023, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.363326] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395024, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.386639] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Reconfiguring VM instance instance-00000012 to attach disk [datastore2] volume-fa93f8e6-42e7-4249-a8fe-24b527dc71b5/volume-fa93f8e6-42e7-4249-a8fe-24b527dc71b5.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 996.388217] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5198d57f-e2d0-4d4c-8cfb-6187bb955e97 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.414853] env[61978]: DEBUG oslo_vmware.api [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 996.414853] env[61978]: value = "task-1395025" [ 996.414853] env[61978]: _type = "Task" [ 996.414853] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.430249] env[61978]: DEBUG oslo_vmware.api [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395025, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.485030] env[61978]: INFO nova.compute.manager [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Took 37.01 seconds to build instance. 
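Annotation: each _attach_volume_vmdk call above receives a connection_info dict of the same shape: driver_volume_type 'vmdk' plus a data payload carrying the backing volume reference ('vm-295891', 'vm-295894'), the volume_id and the access mode, which the driver turns into a ReconfigVM_Task that adds the disk. A minimal sketch of unpacking that payload; field names are taken from the trace, while attach_disk is an assumed callable rather than the driver's real method:

    def attach_volume_vmdk(connection_info, instance_uuid, attach_disk):
        """Unpack the vmdk connection_info seen in the trace and hand the
        pieces to an assumed attach_disk(instance_uuid, volume_ref,
        volume_id, read_only) callable."""
        assert connection_info["driver_volume_type"] == "vmdk"
        data = connection_info["data"]
        volume_ref = data["volume"]             # e.g. 'vm-295891' in the trace
        volume_id = data["volume_id"]
        read_only = data.get("access_mode", "rw") == "ro"
        return attach_disk(instance_uuid, volume_ref, volume_id, read_only)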
[ 996.622985] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Releasing lock "refresh_cache-2c1ce021-255f-454d-ba0e-c85380f3e973" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.623500] env[61978]: DEBUG nova.compute.manager [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Instance network_info: |[{"id": "6cc22552-945d-43cc-be37-f57b7f56d3b2", "address": "fa:16:3e:98:0a:2b", "network": {"id": "c9fe3a73-eb7c-4b01-93ed-10d286a49d32", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-230759624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ae4a07d37474c3095bc5fb5a7e16bd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cc22552-94", "ovs_interfaceid": "6cc22552-945d-43cc-be37-f57b7f56d3b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 996.623973] env[61978]: DEBUG oslo_concurrency.lockutils [req-39a0c769-35a6-44f2-9572-c6100b9526b4 req-4ebd1e13-e679-4341-9bec-123b11e3c239 service nova] Acquired lock "refresh_cache-2c1ce021-255f-454d-ba0e-c85380f3e973" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.624357] env[61978]: DEBUG nova.network.neutron [req-39a0c769-35a6-44f2-9572-c6100b9526b4 req-4ebd1e13-e679-4341-9bec-123b11e3c239 service nova] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Refreshing network info cache for port 6cc22552-945d-43cc-be37-f57b7f56d3b2 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 996.626743] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:0a:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6cc22552-945d-43cc-be37-f57b7f56d3b2', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 996.634843] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Creating folder: Project (3ae4a07d37474c3095bc5fb5a7e16bd4). Parent ref: group-v295764. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 996.636142] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3bdb1dd2-408d-4e20-b756-d0ada44b294e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.651696] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Created folder: Project (3ae4a07d37474c3095bc5fb5a7e16bd4) in parent group-v295764. [ 996.651910] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Creating folder: Instances. Parent ref: group-v295896. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 996.652197] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3629f7c-09e3-403c-94a7-99766f1f46a9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.664405] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Created folder: Instances in parent group-v295896. [ 996.664833] env[61978]: DEBUG oslo.service.loopingcall [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 996.665354] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 996.665667] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9aa44de-951a-40f0-9b39-cb82f5c34e0a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.688116] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 996.688116] env[61978]: value = "task-1395028" [ 996.688116] env[61978]: _type = "Task" [ 996.688116] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.697454] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395028, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.822922] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 996.823276] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.348s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.823674] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.434s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.825889] env[61978]: INFO nova.compute.claims [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 996.838923] env[61978]: DEBUG oslo_vmware.api [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395023, 'name': ReconfigVM_Task, 'duration_secs': 0.973929} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.839387] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Reconfigured VM instance instance-0000001f to attach disk [datastore2] volume-229fb92f-4196-4c02-99cd-5cfa5c710c35/volume-229fb92f-4196-4c02-99cd-5cfa5c710c35.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 996.847333] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1792cbcd-320d-4d69-96ef-a512449263a5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.878783] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395024, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.767042} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.881865] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] cb004a19-0048-4766-af7c-0fbde867f422/cb004a19-0048-4766-af7c-0fbde867f422.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 996.882231] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 996.882693] env[61978]: DEBUG oslo_vmware.api [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 996.882693] env[61978]: value = "task-1395029" [ 996.882693] env[61978]: _type = "Task" [ 996.882693] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.882946] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-971914be-7d1b-4986-a160-53d37ab029c2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.902697] env[61978]: DEBUG oslo_vmware.api [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395029, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.904832] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 996.904832] env[61978]: value = "task-1395030" [ 996.904832] env[61978]: _type = "Task" [ 996.904832] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.909929] env[61978]: INFO nova.compute.manager [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Rescuing [ 996.910322] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "refresh_cache-8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.910618] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquired lock "refresh_cache-8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.910742] env[61978]: DEBUG nova.network.neutron [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 996.936710] env[61978]: DEBUG oslo_vmware.api [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395025, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.986521] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5c6403e3-1e40-4060-b199-10d0850c7050 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lock "b356fc81-f857-4416-8eb0-28c66d137967" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.737s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.123451] env[61978]: DEBUG nova.virt.hardware [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 997.123813] env[61978]: DEBUG nova.virt.hardware [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 997.124038] env[61978]: DEBUG nova.virt.hardware [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 997.124254] env[61978]: DEBUG nova.virt.hardware [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 997.124405] env[61978]: DEBUG nova.virt.hardware [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 997.124556] env[61978]: DEBUG nova.virt.hardware [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 997.124852] env[61978]: DEBUG nova.virt.hardware [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 997.125108] env[61978]: DEBUG nova.virt.hardware [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 997.125498] env[61978]: DEBUG nova.virt.hardware [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 997.125805] env[61978]: DEBUG nova.virt.hardware [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 997.126087] env[61978]: DEBUG nova.virt.hardware [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 997.129123] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2826fc93-0ac2-4772-a5ac-ba1b6d401c72 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.140375] env[61978]: DEBUG oslo_vmware.rw_handles [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f9e237-1ba9-4857-3f3c-c4ee9839d2c8/disk-0.vmdk. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 997.143287] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559787d0-5f8b-44d4-bf07-2d77c9e905cc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.147548] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a30dafd-1c96-40ce-bf01-626e148fbfeb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.157413] env[61978]: DEBUG oslo_vmware.rw_handles [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f9e237-1ba9-4857-3f3c-c4ee9839d2c8/disk-0.vmdk is in state: ready. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 997.157413] env[61978]: ERROR oslo_vmware.rw_handles [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f9e237-1ba9-4857-3f3c-c4ee9839d2c8/disk-0.vmdk due to incomplete transfer. 
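Annotation: the nova.virt.hardware entries a few lines above walk from flavor and image limits (all unset, so the 65536 maxima apply) to exactly one possible topology for a single vCPU, 1 socket x 1 core x 1 thread. That filtering step can be reproduced with a small enumeration; this is an illustrative reconstruction under those assumptions, not Nova's exact algorithm:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product equals vcpus
        and which respect the per-dimension maxima, matching the
        'Got 1 possible topologies ... [1:1:1]' result above for vcpus=1."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    yield (sockets, cores, threads)

    print(list(possible_topologies(1)))  # [(1, 1, 1)]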
[ 997.164950] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c7719eba-971e-4021-b1f8-afe15b85b072 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.167329] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:56:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e4c8c8fd-baca-4e60-97dc-ff0418d63215', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15ee1476-11da-4794-a070-c4365a572948', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 997.174871] env[61978]: DEBUG oslo.service.loopingcall [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 997.175245] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 997.176188] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-55453e3a-713c-4881-84d6-364aa00bc58e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.192516] env[61978]: DEBUG oslo_vmware.rw_handles [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f9e237-1ba9-4857-3f3c-c4ee9839d2c8/disk-0.vmdk. {{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 997.192729] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Uploaded image 8bd7f71b-72a1-4c54-9f6c-3a3eaaf8b233 to the Glance image server {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 997.197379] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Destroying the VM {{(pid=61978) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 997.200190] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e4ba81a2-eb6a-4df5-86fe-3a5481bd3472 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.207274] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 997.207274] env[61978]: value = "task-1395031" [ 997.207274] env[61978]: _type = "Task" [ 997.207274] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.210828] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395028, 'name': CreateVM_Task, 'duration_secs': 0.449967} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.215093] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 997.215587] env[61978]: DEBUG oslo_vmware.api [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for the task: (returnval){ [ 997.215587] env[61978]: value = "task-1395032" [ 997.215587] env[61978]: _type = "Task" [ 997.215587] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.216330] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.216796] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.217231] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 997.217941] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-165b7647-2b01-4b72-8bcc-a161a896ed7c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.226370] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395031, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.231936] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Waiting for the task: (returnval){ [ 997.231936] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5252de23-e7f5-2e7a-9090-026b28033450" [ 997.231936] env[61978]: _type = "Task" [ 997.231936] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.234652] env[61978]: DEBUG oslo_vmware.api [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1395032, 'name': Destroy_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.244094] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 997.244457] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5252de23-e7f5-2e7a-9090-026b28033450, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.244675] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 997.396610] env[61978]: DEBUG oslo_vmware.api [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395029, 'name': ReconfigVM_Task, 'duration_secs': 0.200584} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.396959] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295891', 'volume_id': '229fb92f-4196-4c02-99cd-5cfa5c710c35', 'name': 'volume-229fb92f-4196-4c02-99cd-5cfa5c710c35', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f3c837fb-be7e-40a6-aae4-7f213c62ab2c', 'attached_at': '', 'detached_at': '', 'volume_id': '229fb92f-4196-4c02-99cd-5cfa5c710c35', 'serial': '229fb92f-4196-4c02-99cd-5cfa5c710c35'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 997.419124] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395030, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114112} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.423921] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 997.425445] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff9439f-9e1e-4f37-acec-d1487ec6fb44 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.435718] env[61978]: DEBUG oslo_vmware.api [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395025, 'name': ReconfigVM_Task, 'duration_secs': 0.687481} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.445515] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Reconfigured VM instance instance-00000012 to attach disk [datastore2] volume-fa93f8e6-42e7-4249-a8fe-24b527dc71b5/volume-fa93f8e6-42e7-4249-a8fe-24b527dc71b5.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 997.460790] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] cb004a19-0048-4766-af7c-0fbde867f422/cb004a19-0048-4766-af7c-0fbde867f422.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 997.463123] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4d3afc3-98c1-43c4-b436-33654e259dd4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.473367] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9695db36-516e-4c43-b55f-859f34694982 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.489495] env[61978]: DEBUG nova.compute.manager [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 997.495387] env[61978]: DEBUG nova.network.neutron [req-39a0c769-35a6-44f2-9572-c6100b9526b4 req-4ebd1e13-e679-4341-9bec-123b11e3c239 service nova] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Updated VIF entry in instance network info cache for port 6cc22552-945d-43cc-be37-f57b7f56d3b2. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 997.496086] env[61978]: DEBUG nova.network.neutron [req-39a0c769-35a6-44f2-9572-c6100b9526b4 req-4ebd1e13-e679-4341-9bec-123b11e3c239 service nova] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Updating instance_info_cache with network_info: [{"id": "6cc22552-945d-43cc-be37-f57b7f56d3b2", "address": "fa:16:3e:98:0a:2b", "network": {"id": "c9fe3a73-eb7c-4b01-93ed-10d286a49d32", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-230759624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ae4a07d37474c3095bc5fb5a7e16bd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cc22552-94", "ovs_interfaceid": "6cc22552-945d-43cc-be37-f57b7f56d3b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.499757] env[61978]: DEBUG oslo_vmware.api [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 997.499757] env[61978]: value = "task-1395034" [ 997.499757] env[61978]: _type = "Task" [ 997.499757] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.499757] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 997.499757] env[61978]: value = "task-1395033" [ 997.499757] env[61978]: _type = "Task" [ 997.499757] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.517192] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395033, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.517513] env[61978]: DEBUG oslo_vmware.api [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395034, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.689447] env[61978]: DEBUG nova.network.neutron [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Updating instance_info_cache with network_info: [{"id": "461cf97f-d4c1-4a04-bc0f-ea10c52ecce3", "address": "fa:16:3e:33:87:13", "network": {"id": "3e60c5f0-d590-4a22-a8bf-c0356ac4deb4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1865077723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86f4ae0b29af4ee2b33e5a499cf1e899", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap461cf97f-d4", "ovs_interfaceid": "461cf97f-d4c1-4a04-bc0f-ea10c52ecce3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.721632] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395031, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.732086] env[61978]: DEBUG oslo_vmware.api [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1395032, 'name': Destroy_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.745122] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5252de23-e7f5-2e7a-9090-026b28033450, 'name': SearchDatastore_Task, 'duration_secs': 0.033471} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.745462] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.745720] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 997.745981] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.746173] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.746370] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 997.752850] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc5206ab-d3e5-4c83-b08d-3bd60bda17a1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.755271] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 997.755271] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 997.765152] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 997.765447] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 997.766322] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87130f15-ef0f-4fe1-945f-3801eaae194e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.773865] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Waiting for the task: (returnval){ [ 997.773865] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]523ea1c0-a2dc-6cbf-0c15-5395a52084be" [ 997.773865] env[61978]: _type = "Task" [ 997.773865] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.783438] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523ea1c0-a2dc-6cbf-0c15-5395a52084be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.002204] env[61978]: DEBUG oslo_concurrency.lockutils [req-39a0c769-35a6-44f2-9572-c6100b9526b4 req-4ebd1e13-e679-4341-9bec-123b11e3c239 service nova] Releasing lock "refresh_cache-2c1ce021-255f-454d-ba0e-c85380f3e973" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.020940] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395033, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.021302] env[61978]: DEBUG oslo_vmware.api [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395034, 'name': ReconfigVM_Task, 'duration_secs': 0.257994} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.022370] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.022808] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295894', 'volume_id': 'fa93f8e6-42e7-4249-a8fe-24b527dc71b5', 'name': 'volume-fa93f8e6-42e7-4249-a8fe-24b527dc71b5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '96bef3f3-a45c-43ba-a86a-66c1d5686ea6', 'attached_at': '', 'detached_at': '', 'volume_id': 'fa93f8e6-42e7-4249-a8fe-24b527dc71b5', 'serial': 'fa93f8e6-42e7-4249-a8fe-24b527dc71b5'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 998.193897] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Releasing lock "refresh_cache-8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.234557] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395031, 'name': CreateVM_Task, 'duration_secs': 0.696918} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.235205] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 998.236373] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.237234] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.237644] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 998.240495] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53870fb0-ed5d-4d03-8e95-92f675456f02 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.246656] env[61978]: DEBUG oslo_vmware.api [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1395032, 'name': Destroy_Task, 'duration_secs': 0.921037} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.247899] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Destroyed the VM [ 998.247899] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Deleting Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 998.248165] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d307a7da-1efa-4aef-938f-17ce3aa0c9ac {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.251599] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 998.251599] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ce8a94-2aae-d5df-3afb-124d88996d35" [ 998.251599] env[61978]: _type = "Task" [ 998.251599] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.262752] env[61978]: DEBUG oslo_vmware.api [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for the task: (returnval){ [ 998.262752] env[61978]: value = "task-1395035" [ 998.262752] env[61978]: _type = "Task" [ 998.262752] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.269318] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ce8a94-2aae-d5df-3afb-124d88996d35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.275635] env[61978]: DEBUG oslo_vmware.api [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1395035, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.289726] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523ea1c0-a2dc-6cbf-0c15-5395a52084be, 'name': SearchDatastore_Task, 'duration_secs': 0.019871} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.290742] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd5403bd-b158-4fd8-8f44-a92071e15033 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.300412] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Waiting for the task: (returnval){ [ 998.300412] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]527e7bb0-5c31-39fc-ab12-f11a07d13201" [ 998.300412] env[61978]: _type = "Task" [ 998.300412] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.313244] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]527e7bb0-5c31-39fc-ab12-f11a07d13201, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.379739] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7864406b-ae33-45c9-a4e7-d89d74d57de6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.388728] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0ae45b-b745-429c-b0af-2d0b962336c3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.420896] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c611a3d3-c4ae-4d0e-8319-12f22369432d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.430092] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea80ede-6c04-4561-b55b-fde3e6f91109 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.436892] env[61978]: DEBUG nova.objects.instance [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lazy-loading 'flavor' on Instance uuid f3c837fb-be7e-40a6-aae4-7f213c62ab2c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.448875] env[61978]: DEBUG nova.compute.provider_tree [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 998.515121] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395033, 'name': ReconfigVM_Task, 'duration_secs': 0.618361} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.515539] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Reconfigured VM instance instance-0000002d to attach disk [datastore2] cb004a19-0048-4766-af7c-0fbde867f422/cb004a19-0048-4766-af7c-0fbde867f422.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 998.516225] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d67cdb5-6a38-4e6f-be42-4377b2100cdd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.525955] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 998.525955] env[61978]: value = "task-1395036" [ 998.525955] env[61978]: _type = "Task" [ 998.525955] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.550138] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395036, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.739049] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 998.739524] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6afa43dc-528e-4d58-9238-9e1baed135cb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.747915] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 998.747915] env[61978]: value = "task-1395037" [ 998.747915] env[61978]: _type = "Task" [ 998.747915] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.762412] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395037, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.771255] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ce8a94-2aae-d5df-3afb-124d88996d35, 'name': SearchDatastore_Task, 'duration_secs': 0.017668} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.775128] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.775459] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 998.775738] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.784931] env[61978]: DEBUG oslo_vmware.api [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1395035, 'name': RemoveSnapshot_Task} progress is 15%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.807345] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "refresh_cache-b26a4784-698d-477a-8db7-58156899d231" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.807875] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquired lock "refresh_cache-b26a4784-698d-477a-8db7-58156899d231" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.808210] env[61978]: DEBUG nova.network.neutron [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: b26a4784-698d-477a-8db7-58156899d231] Forcefully refreshing network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 998.815467] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]527e7bb0-5c31-39fc-ab12-f11a07d13201, 'name': SearchDatastore_Task, 'duration_secs': 0.014376} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.816372] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.817529] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 2c1ce021-255f-454d-ba0e-c85380f3e973/2c1ce021-255f-454d-ba0e-c85380f3e973.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 998.818071] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.818316] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 998.818581] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1463f4a5-5dd5-4400-932a-3fd49f6b9d64 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.820840] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-296288de-9175-4b64-9f69-59408e2cccb3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.831486] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Waiting for the task: (returnval){ [ 998.831486] env[61978]: value = "task-1395038" [ 998.831486] env[61978]: _type = "Task" [ 998.831486] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.836945] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 998.837146] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 998.837995] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e0964e3-67ca-463b-9a3f-8da1ac74efb2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.846267] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 998.846267] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b0e21c-52dd-83f5-fbe3-30a7df56f61c" [ 998.846267] env[61978]: _type = "Task" [ 998.846267] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.852032] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': task-1395038, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.860474] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b0e21c-52dd-83f5-fbe3-30a7df56f61c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.953864] env[61978]: DEBUG nova.scheduler.client.report [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 998.957775] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2dd2b49b-823a-4e2b-83a1-865339babbe5 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.507s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.041580] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395036, 'name': Rename_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.163112] env[61978]: DEBUG nova.objects.instance [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lazy-loading 'flavor' on Instance uuid 96bef3f3-a45c-43ba-a86a-66c1d5686ea6 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 999.259286] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395037, 'name': PowerOffVM_Task, 'duration_secs': 0.266378} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.259753] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 999.260935] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b823064e-c0cc-4aba-86b0-4d2f7c1e9c72 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.285737] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09f89819-d296-4719-9a89-4c217a47c904 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.296226] env[61978]: DEBUG oslo_vmware.api [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1395035, 'name': RemoveSnapshot_Task} progress is 84%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.328231] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 999.328647] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c76625a-da0f-4257-a43c-411b2b5d813b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.338665] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 999.338665] env[61978]: value = "task-1395039" [ 999.338665] env[61978]: _type = "Task" [ 999.338665] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.345973] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': task-1395038, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.357417] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] VM already powered off {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 999.357755] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 999.358381] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.358381] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.358544] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 999.358841] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-30b931b7-404e-48e7-b1ef-69fec355d8f2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.368216] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b0e21c-52dd-83f5-fbe3-30a7df56f61c, 'name': SearchDatastore_Task, 'duration_secs': 0.025232} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.370868] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c696607-9a55-4990-9e1c-77c8293a8692 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.379009] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 999.379851] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 999.382186] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e71018b6-82f2-4d38-b0cb-375af58b5b55 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.385193] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 999.385193] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]527df037-2029-9a04-74a2-5696b4681bf8" [ 999.385193] env[61978]: _type = "Task" [ 999.385193] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.392142] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 999.392142] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d50c38-d26b-07e3-c0cc-f9b251f619e1" [ 999.392142] env[61978]: _type = "Task" [ 999.392142] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.399254] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]527df037-2029-9a04-74a2-5696b4681bf8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.405404] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d50c38-d26b-07e3-c0cc-f9b251f619e1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.464890] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.641s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.465500] env[61978]: DEBUG nova.compute.manager [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 999.469638] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ee719a03-256c-45a1-afb9-e0186bf60233 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.469757] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ee719a03-256c-45a1-afb9-e0186bf60233 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.470019] env[61978]: DEBUG nova.compute.manager [None req-ee719a03-256c-45a1-afb9-e0186bf60233 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 999.470639] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.249s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.472156] env[61978]: INFO nova.compute.claims [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 999.478723] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eddcebe-dc95-45c7-a979-ea35090d3ac4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.485858] env[61978]: DEBUG nova.compute.manager [None req-ee719a03-256c-45a1-afb9-e0186bf60233 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61978) 
do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 999.486613] env[61978]: DEBUG nova.objects.instance [None req-ee719a03-256c-45a1-afb9-e0186bf60233 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lazy-loading 'flavor' on Instance uuid f3c837fb-be7e-40a6-aae4-7f213c62ab2c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 999.540317] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395036, 'name': Rename_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.668224] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd68f58d-8fb7-4eb4-97a6-d47155e780d0 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "96bef3f3-a45c-43ba-a86a-66c1d5686ea6" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.504s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.792196] env[61978]: DEBUG oslo_vmware.api [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1395035, 'name': RemoveSnapshot_Task, 'duration_secs': 1.116547} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.792663] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Deleted Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 999.792734] env[61978]: INFO nova.compute.manager [None req-c305652f-d6f9-47ef-aebd-773638d5b4f9 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Took 17.75 seconds to snapshot the instance on the hypervisor. [ 999.843995] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': task-1395038, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.642869} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.843995] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 2c1ce021-255f-454d-ba0e-c85380f3e973/2c1ce021-255f-454d-ba0e-c85380f3e973.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 999.843995] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 999.844407] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6072cb80-664a-4b6e-9d82-e3d08e4e55e5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.853303] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Waiting for the task: (returnval){ [ 999.853303] env[61978]: value = "task-1395040" [ 999.853303] env[61978]: _type = "Task" [ 999.853303] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.863477] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': task-1395040, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.901497] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]527df037-2029-9a04-74a2-5696b4681bf8, 'name': SearchDatastore_Task, 'duration_secs': 0.065006} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.901694] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.901987] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 92eb5edb-803b-48d4-8c4f-338d7c3b3d13/92eb5edb-803b-48d4-8c4f-338d7c3b3d13.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 999.902894] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8e84e2d-270b-460f-aeec-50229a227fbe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.908722] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d50c38-d26b-07e3-c0cc-f9b251f619e1, 'name': SearchDatastore_Task, 'duration_secs': 0.028184} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.909835] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b3ffd01-301c-48de-985e-b727b2a96e6f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.919078] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 999.919078] env[61978]: value = "task-1395041" [ 999.919078] env[61978]: _type = "Task" [ 999.919078] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.919389] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 999.919389] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a661c4-869f-5dbc-0b6d-0633eb9664b5" [ 999.919389] env[61978]: _type = "Task" [ 999.919389] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.928358] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ceacf547-4bea-4a4c-923d-8c350d3e2105 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquiring lock "96bef3f3-a45c-43ba-a86a-66c1d5686ea6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.928647] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ceacf547-4bea-4a4c-923d-8c350d3e2105 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "96bef3f3-a45c-43ba-a86a-66c1d5686ea6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.936904] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a661c4-869f-5dbc-0b6d-0633eb9664b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.937155] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395041, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.973188] env[61978]: DEBUG nova.compute.utils [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 999.974506] env[61978]: DEBUG nova.compute.manager [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 999.974695] env[61978]: DEBUG nova.network.neutron [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 999.991925] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee719a03-256c-45a1-afb9-e0186bf60233 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 999.992214] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52bac231-984c-43f5-b652-5488a0c68f8e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.004162] env[61978]: DEBUG oslo_vmware.api [None req-ee719a03-256c-45a1-afb9-e0186bf60233 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1000.004162] env[61978]: value = "task-1395042" [ 1000.004162] env[61978]: _type = "Task" [ 1000.004162] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.020300] env[61978]: DEBUG oslo_vmware.api [None req-ee719a03-256c-45a1-afb9-e0186bf60233 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395042, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.038340] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395036, 'name': Rename_Task, 'duration_secs': 1.222986} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.038340] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1000.038340] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-82098461-2e21-41d5-8821-48f3f6d967b9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.046039] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 1000.046039] env[61978]: value = "task-1395043" [ 1000.046039] env[61978]: _type = "Task" [ 1000.046039] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.058437] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395043, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.070586] env[61978]: DEBUG nova.policy [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8f50bac42274555ab08e047cdb028ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43ebac7c44604f55b94cbc06648f4908', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1000.110582] env[61978]: DEBUG nova.network.neutron [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: b26a4784-698d-477a-8db7-58156899d231] Updating instance_info_cache with network_info: [{"id": "3a886f4f-5f7c-4f97-8f00-2555aebe9856", "address": "fa:16:3e:94:aa:92", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a886f4f-5f", "ovs_interfaceid": "3a886f4f-5f7c-4f97-8f00-2555aebe9856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.366586] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': task-1395040, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.11384} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.367220] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1000.367925] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe618b5-acae-4f82-a56f-88a1296cf1d5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.396050] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] 2c1ce021-255f-454d-ba0e-c85380f3e973/2c1ce021-255f-454d-ba0e-c85380f3e973.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1000.396869] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e18f48a-44e2-402e-9170-726c45cf44f9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.418195] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Waiting for the task: (returnval){ [ 1000.418195] env[61978]: value = "task-1395044" [ 1000.418195] env[61978]: _type = "Task" [ 1000.418195] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.438975] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395041, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.439363] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a661c4-869f-5dbc-0b6d-0633eb9664b5, 'name': SearchDatastore_Task, 'duration_secs': 0.016725} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.442471] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.442822] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk. {{(pid=61978) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1000.443623] env[61978]: INFO nova.compute.manager [None req-ceacf547-4bea-4a4c-923d-8c350d3e2105 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Detaching volume fa93f8e6-42e7-4249-a8fe-24b527dc71b5 [ 1000.445734] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': task-1395044, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.446291] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a8058875-4bbb-434d-ad64-783f171babd6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.456102] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1000.456102] env[61978]: value = "task-1395045" [ 1000.456102] env[61978]: _type = "Task" [ 1000.456102] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.467420] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395045, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.478287] env[61978]: DEBUG nova.compute.manager [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1000.502747] env[61978]: INFO nova.virt.block_device [None req-ceacf547-4bea-4a4c-923d-8c350d3e2105 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Attempting to driver detach volume fa93f8e6-42e7-4249-a8fe-24b527dc71b5 from mountpoint /dev/sdb [ 1000.503239] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ceacf547-4bea-4a4c-923d-8c350d3e2105 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Volume detach. Driver type: vmdk {{(pid=61978) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1000.503610] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ceacf547-4bea-4a4c-923d-8c350d3e2105 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295894', 'volume_id': 'fa93f8e6-42e7-4249-a8fe-24b527dc71b5', 'name': 'volume-fa93f8e6-42e7-4249-a8fe-24b527dc71b5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '96bef3f3-a45c-43ba-a86a-66c1d5686ea6', 'attached_at': '', 'detached_at': '', 'volume_id': 'fa93f8e6-42e7-4249-a8fe-24b527dc71b5', 'serial': 'fa93f8e6-42e7-4249-a8fe-24b527dc71b5'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1000.505649] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b8e406-9ea0-44b4-9f0a-a3b03ca3f8f1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.542538] env[61978]: DEBUG oslo_vmware.api [None req-ee719a03-256c-45a1-afb9-e0186bf60233 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395042, 'name': PowerOffVM_Task, 'duration_secs': 0.413626} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.546687] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee719a03-256c-45a1-afb9-e0186bf60233 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1000.546911] env[61978]: DEBUG nova.compute.manager [None req-ee719a03-256c-45a1-afb9-e0186bf60233 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1000.547816] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-965a5a95-13fc-4339-a84e-32af0699d275 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.552560] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cde7bb1-9376-484d-96f6-52c711c85c80 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.573703] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395043, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.578300] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f314e5-b05b-47ce-bf8c-29e4a25af28e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.608408] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d6ed1b8-a23d-4ee7-9505-3ae91fb77f67 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.614080] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Releasing lock "refresh_cache-b26a4784-698d-477a-8db7-58156899d231" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.614377] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: b26a4784-698d-477a-8db7-58156899d231] Updated the network info_cache for instance {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1000.614667] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1000.614887] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1000.615108] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] 
Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1000.632157] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1000.632963] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ceacf547-4bea-4a4c-923d-8c350d3e2105 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] The volume has not been displaced from its original location: [datastore2] volume-fa93f8e6-42e7-4249-a8fe-24b527dc71b5/volume-fa93f8e6-42e7-4249-a8fe-24b527dc71b5.vmdk. No consolidation needed. {{(pid=61978) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1000.641245] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ceacf547-4bea-4a4c-923d-8c350d3e2105 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Reconfiguring VM instance instance-00000012 to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1000.646869] env[61978]: DEBUG nova.network.neutron [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Successfully created port: 1682c3e8-c35b-4055-90d6-a236d4439ee1 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1000.649334] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1000.649662] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4533572d-b7a3-4258-8ab4-7db972458092 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.671512] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1000.671638] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1000.672409] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1000.683170] env[61978]: DEBUG oslo_vmware.api [None req-ceacf547-4bea-4a4c-923d-8c350d3e2105 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 1000.683170] env[61978]: value = "task-1395046" [ 1000.683170] env[61978]: _type = "Task" [ 1000.683170] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.700582] env[61978]: DEBUG oslo_vmware.api [None req-ceacf547-4bea-4a4c-923d-8c350d3e2105 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395046, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.939908] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': task-1395044, 'name': ReconfigVM_Task, 'duration_secs': 0.453137} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.947453] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Reconfigured VM instance instance-0000002e to attach disk [datastore1] 2c1ce021-255f-454d-ba0e-c85380f3e973/2c1ce021-255f-454d-ba0e-c85380f3e973.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1000.948414] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395041, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.756552} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.949017] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-40099bac-d7b9-415a-895b-4e73be782370 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.951111] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 92eb5edb-803b-48d4-8c4f-338d7c3b3d13/92eb5edb-803b-48d4-8c4f-338d7c3b3d13.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1000.951339] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1000.951647] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a9218d09-6e6e-40c9-b152-2fe31147ae46 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.964137] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 1000.964137] env[61978]: value = "task-1395048" [ 1000.964137] env[61978]: _type = "Task" [ 1000.964137] 
env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.965026] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Waiting for the task: (returnval){ [ 1000.965026] env[61978]: value = "task-1395047" [ 1000.965026] env[61978]: _type = "Task" [ 1000.965026] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.977768] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395045, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.991044] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': task-1395047, 'name': Rename_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.995233] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395048, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.065690] env[61978]: DEBUG oslo_vmware.api [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395043, 'name': PowerOnVM_Task, 'duration_secs': 0.688343} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.069046] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1001.069245] env[61978]: INFO nova.compute.manager [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Took 11.64 seconds to spawn the instance on the hypervisor. 
[ 1001.069463] env[61978]: DEBUG nova.compute.manager [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1001.070798] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6f35d3-f1e9-419f-aa7d-35269903633f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.078318] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ee719a03-256c-45a1-afb9-e0186bf60233 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.608s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.176204] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.196018] env[61978]: DEBUG oslo_vmware.api [None req-ceacf547-4bea-4a4c-923d-8c350d3e2105 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395046, 'name': ReconfigVM_Task, 'duration_secs': 0.352467} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.196018] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ceacf547-4bea-4a4c-923d-8c350d3e2105 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Reconfigured VM instance instance-00000012 to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1001.202660] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-425a33c8-eb50-4655-8109-dd63d13330b3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.225586] env[61978]: DEBUG oslo_vmware.api [None req-ceacf547-4bea-4a4c-923d-8c350d3e2105 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 1001.225586] env[61978]: value = "task-1395049" [ 1001.225586] env[61978]: _type = "Task" [ 1001.225586] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.240060] env[61978]: DEBUG oslo_vmware.api [None req-ceacf547-4bea-4a4c-923d-8c350d3e2105 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395049, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.299179] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-900d6301-d5d4-4a1a-9c13-597a794baa3e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.308256] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aaf2ea1-f1e1-4204-9868-ff14a5eb105c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.343677] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd7d1f4-3062-487d-ab78-b330a083e41a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.352997] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6c8c2d-ae46-4dcb-8aff-da7ebd631b89 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.367776] env[61978]: DEBUG nova.compute.provider_tree [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1001.477167] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395045, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.481165] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395048, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.168195} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.483647] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1001.483647] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e97aeb0-5c12-49b1-a575-a1e13ac42ba6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.490418] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': task-1395047, 'name': Rename_Task, 'duration_secs': 0.28643} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.492155] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1001.492847] env[61978]: DEBUG nova.compute.manager [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1001.495506] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e94a3391-54fc-4904-a0c2-6002c59d7c20 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.520535] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 92eb5edb-803b-48d4-8c4f-338d7c3b3d13/92eb5edb-803b-48d4-8c4f-338d7c3b3d13.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1001.524679] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b111cae4-ce1a-424e-a9dc-e01bc313dbd1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.543965] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Waiting for the task: (returnval){ [ 1001.543965] env[61978]: value = "task-1395050" [ 1001.543965] env[61978]: _type = "Task" [ 1001.543965] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.555527] env[61978]: DEBUG nova.virt.hardware [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1001.555527] env[61978]: DEBUG nova.virt.hardware [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1001.555527] env[61978]: DEBUG nova.virt.hardware [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1001.555871] env[61978]: DEBUG nova.virt.hardware [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1001.555871] env[61978]: DEBUG nova.virt.hardware [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1001.555871] env[61978]: DEBUG nova.virt.hardware [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1001.555871] env[61978]: DEBUG nova.virt.hardware [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1001.555871] env[61978]: DEBUG nova.virt.hardware [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1001.556032] env[61978]: DEBUG nova.virt.hardware [None 
req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1001.556032] env[61978]: DEBUG nova.virt.hardware [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1001.556032] env[61978]: DEBUG nova.virt.hardware [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1001.557778] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d8ab521-ff31-41d5-b8d3-2ed30268e4c6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.562079] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 1001.562079] env[61978]: value = "task-1395051" [ 1001.562079] env[61978]: _type = "Task" [ 1001.562079] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.571954] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': task-1395050, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.574096] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc60bd0-e67e-417c-a151-61ab517cda22 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.584186] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395051, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.599911] env[61978]: INFO nova.compute.manager [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Took 37.61 seconds to build instance. [ 1001.736659] env[61978]: DEBUG oslo_vmware.api [None req-ceacf547-4bea-4a4c-923d-8c350d3e2105 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395049, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.870808] env[61978]: DEBUG nova.scheduler.client.report [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1001.972368] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395045, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.057217] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': task-1395050, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.072126] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395051, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.103054] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4f22bfb5-88a3-4552-b644-84ad639adac9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lock "cb004a19-0048-4766-af7c-0fbde867f422" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.412s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.238423] env[61978]: DEBUG oslo_vmware.api [None req-ceacf547-4bea-4a4c-923d-8c350d3e2105 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395049, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.377145] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.906s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.377699] env[61978]: DEBUG nova.compute.manager [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1002.380346] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.986s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.380550] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.382595] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.981s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.382806] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.384533] env[61978]: DEBUG oslo_concurrency.lockutils [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.276s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.386142] env[61978]: INFO nova.compute.claims [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1002.410772] env[61978]: INFO nova.scheduler.client.report [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Deleted allocations for instance f22e097d-f1a5-414a-82cc-ab455db876c7 [ 1002.413289] env[61978]: INFO nova.scheduler.client.report [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Deleted allocations for instance 371ddf66-a39b-41c4-bbd1-2a1c1b99834e [ 1002.473814] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395045, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.823121} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.474217] env[61978]: INFO nova.virt.vmwareapi.ds_util [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk. [ 1002.474947] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1361bf3-35d4-48d2-a7f0-7600c5d4c35a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.502023] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1002.502380] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c4464ad-d22a-49f6-8bad-10ca4a20ecf6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.516229] env[61978]: DEBUG nova.objects.instance [None req-93e8f5b1-bc76-4ca5-9fc3-8ec0869bdc89 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lazy-loading 'flavor' on Instance uuid f3c837fb-be7e-40a6-aae4-7f213c62ab2c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1002.525187] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1002.525187] env[61978]: value = "task-1395052" [ 1002.525187] env[61978]: _type = "Task" [ 1002.525187] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.536123] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Acquiring lock "dd686727-fc33-4dc4-b386-aabec27cf215" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.536398] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Lock "dd686727-fc33-4dc4-b386-aabec27cf215" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.536613] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Acquiring lock "dd686727-fc33-4dc4-b386-aabec27cf215-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.536804] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Lock "dd686727-fc33-4dc4-b386-aabec27cf215-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.537012] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Lock "dd686727-fc33-4dc4-b386-aabec27cf215-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.542558] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395052, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.543131] env[61978]: INFO nova.compute.manager [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Terminating instance [ 1002.545269] env[61978]: DEBUG nova.compute.manager [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1002.545561] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1002.546785] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9cfab50-420d-4a18-879c-90e9c07ccdf2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.560432] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': task-1395050, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.563737] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1002.567458] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e4cb804e-5d88-4274-8181-f664ed863ac2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.575803] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395051, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.577306] env[61978]: DEBUG oslo_vmware.api [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for the task: (returnval){ [ 1002.577306] env[61978]: value = "task-1395053" [ 1002.577306] env[61978]: _type = "Task" [ 1002.577306] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.586067] env[61978]: DEBUG oslo_vmware.api [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1395053, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.739152] env[61978]: DEBUG oslo_vmware.api [None req-ceacf547-4bea-4a4c-923d-8c350d3e2105 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395049, 'name': ReconfigVM_Task, 'duration_secs': 1.333315} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.739569] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ceacf547-4bea-4a4c-923d-8c350d3e2105 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295894', 'volume_id': 'fa93f8e6-42e7-4249-a8fe-24b527dc71b5', 'name': 'volume-fa93f8e6-42e7-4249-a8fe-24b527dc71b5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '96bef3f3-a45c-43ba-a86a-66c1d5686ea6', 'attached_at': '', 'detached_at': '', 'volume_id': 'fa93f8e6-42e7-4249-a8fe-24b527dc71b5', 'serial': 'fa93f8e6-42e7-4249-a8fe-24b527dc71b5'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1002.894022] env[61978]: DEBUG nova.compute.utils [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1002.895918] env[61978]: DEBUG nova.compute.manager [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1002.896187] env[61978]: DEBUG nova.network.neutron [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1002.926458] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5bb688e-f986-468f-9d66-3da2b413cc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "f22e097d-f1a5-414a-82cc-ab455db876c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.500s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.929297] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a2b6909-7fc9-4a30-a239-e38c2e5f8a31 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Lock "371ddf66-a39b-41c4-bbd1-2a1c1b99834e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.676s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.951176] env[61978]: DEBUG nova.policy [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52367cbadba2425d89c45dd7dd03743b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'db23883478c54c51a786e66bb57aa17d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 
'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1003.001571] env[61978]: DEBUG nova.network.neutron [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Successfully updated port: 1682c3e8-c35b-4055-90d6-a236d4439ee1 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1003.019192] env[61978]: DEBUG nova.compute.manager [req-8b586368-9a70-4ba7-a34a-1281bbb47014 req-27e07b85-bf84-40f8-bda4-ba486d191a54 service nova] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Received event network-vif-plugged-1682c3e8-c35b-4055-90d6-a236d4439ee1 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1003.019192] env[61978]: DEBUG oslo_concurrency.lockutils [req-8b586368-9a70-4ba7-a34a-1281bbb47014 req-27e07b85-bf84-40f8-bda4-ba486d191a54 service nova] Acquiring lock "8f609401-af09-4291-a1e7-a356fbc4aac9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.019192] env[61978]: DEBUG oslo_concurrency.lockutils [req-8b586368-9a70-4ba7-a34a-1281bbb47014 req-27e07b85-bf84-40f8-bda4-ba486d191a54 service nova] Lock "8f609401-af09-4291-a1e7-a356fbc4aac9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.019192] env[61978]: DEBUG oslo_concurrency.lockutils [req-8b586368-9a70-4ba7-a34a-1281bbb47014 req-27e07b85-bf84-40f8-bda4-ba486d191a54 service nova] Lock "8f609401-af09-4291-a1e7-a356fbc4aac9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.019378] env[61978]: DEBUG nova.compute.manager [req-8b586368-9a70-4ba7-a34a-1281bbb47014 req-27e07b85-bf84-40f8-bda4-ba486d191a54 service nova] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] No waiting events found dispatching network-vif-plugged-1682c3e8-c35b-4055-90d6-a236d4439ee1 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1003.019793] env[61978]: WARNING nova.compute.manager [req-8b586368-9a70-4ba7-a34a-1281bbb47014 req-27e07b85-bf84-40f8-bda4-ba486d191a54 service nova] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Received unexpected event network-vif-plugged-1682c3e8-c35b-4055-90d6-a236d4439ee1 for instance with vm_state building and task_state spawning. 
[ 1003.022695] env[61978]: DEBUG oslo_concurrency.lockutils [None req-93e8f5b1-bc76-4ca5-9fc3-8ec0869bdc89 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "refresh_cache-f3c837fb-be7e-40a6-aae4-7f213c62ab2c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.022855] env[61978]: DEBUG oslo_concurrency.lockutils [None req-93e8f5b1-bc76-4ca5-9fc3-8ec0869bdc89 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquired lock "refresh_cache-f3c837fb-be7e-40a6-aae4-7f213c62ab2c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.023494] env[61978]: DEBUG nova.network.neutron [None req-93e8f5b1-bc76-4ca5-9fc3-8ec0869bdc89 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1003.023694] env[61978]: DEBUG nova.objects.instance [None req-93e8f5b1-bc76-4ca5-9fc3-8ec0869bdc89 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lazy-loading 'info_cache' on Instance uuid f3c837fb-be7e-40a6-aae4-7f213c62ab2c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1003.044054] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395052, 'name': ReconfigVM_Task, 'duration_secs': 0.454134} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.046313] env[61978]: DEBUG oslo_vmware.rw_handles [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f402bf-2e44-b898-316f-388237347862/disk-0.vmdk. 
{{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1003.046664] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Reconfigured VM instance instance-00000027 to attach disk [datastore2] 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1003.047732] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-835b67cb-791e-4758-b0d5-f8a3ef5198b8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.050883] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c6c7ae-d710-438a-8feb-4b1f8742ec2b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.064732] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': task-1395050, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.087795] env[61978]: DEBUG oslo_vmware.rw_handles [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f402bf-2e44-b898-316f-388237347862/disk-0.vmdk is in state: ready. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1003.087934] env[61978]: ERROR oslo_vmware.rw_handles [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f402bf-2e44-b898-316f-388237347862/disk-0.vmdk due to incomplete transfer. [ 1003.093616] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a73f60e3-2b8a-4af8-9cd3-d77c59250ed9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.098953] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c672289b-9484-4176-a346-d9c689ed4c03 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.116651] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395051, 'name': ReconfigVM_Task, 'duration_secs': 1.406872} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.122119] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 92eb5edb-803b-48d4-8c4f-338d7c3b3d13/92eb5edb-803b-48d4-8c4f-338d7c3b3d13.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1003.122530] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1003.122530] env[61978]: value = "task-1395054" [ 1003.122530] env[61978]: _type = "Task" [ 1003.122530] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.122760] env[61978]: DEBUG oslo_vmware.api [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1395053, 'name': PowerOffVM_Task, 'duration_secs': 0.238837} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.122978] env[61978]: DEBUG oslo_vmware.rw_handles [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f402bf-2e44-b898-316f-388237347862/disk-0.vmdk. 
{{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1003.123178] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Uploaded image 81a4ea36-3cda-42da-b32e-eca1d059e24b to the Glance image server {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1003.125299] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Destroying the VM {{(pid=61978) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1003.125571] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-314f5ef8-e7c4-48a5-8933-d3f4bad2c851 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.127567] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1003.127760] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1003.127998] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5d595bf1-306e-4ca6-9e47-39ce8fd23667 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.133300] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e589b5d3-084f-4f65-bbf9-bb654b7c9879 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.178991] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 1003.178991] env[61978]: value = "task-1395055" [ 1003.178991] env[61978]: _type = "Task" [ 1003.178991] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.180590] env[61978]: DEBUG oslo_vmware.api [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1003.180590] env[61978]: value = "task-1395056" [ 1003.180590] env[61978]: _type = "Task" [ 1003.180590] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.202092] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395055, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.202092] env[61978]: DEBUG oslo_vmware.api [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395056, 'name': Destroy_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.294056] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1003.294732] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1003.294732] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Deleting the datastore file [datastore1] dd686727-fc33-4dc4-b386-aabec27cf215 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1003.295072] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f2699912-dd5d-4436-a480-e8b87a356b15 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.303821] env[61978]: DEBUG nova.network.neutron [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Successfully created port: 948f3af3-d436-4415-b7d8-edefe3d32c25 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1003.308350] env[61978]: DEBUG oslo_vmware.api [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for the task: (returnval){ [ 1003.308350] env[61978]: value = "task-1395058" [ 1003.308350] env[61978]: _type = "Task" [ 1003.308350] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.310072] env[61978]: DEBUG nova.objects.instance [None req-ceacf547-4bea-4a4c-923d-8c350d3e2105 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lazy-loading 'flavor' on Instance uuid 96bef3f3-a45c-43ba-a86a-66c1d5686ea6 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1003.323282] env[61978]: DEBUG oslo_vmware.api [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1395058, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.396700] env[61978]: DEBUG nova.compute.manager [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1003.505410] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "refresh_cache-8f609401-af09-4291-a1e7-a356fbc4aac9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.507096] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "refresh_cache-8f609401-af09-4291-a1e7-a356fbc4aac9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.507096] env[61978]: DEBUG nova.network.neutron [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1003.527594] env[61978]: DEBUG nova.objects.base [None req-93e8f5b1-bc76-4ca5-9fc3-8ec0869bdc89 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1003.560757] env[61978]: DEBUG oslo_vmware.api [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': task-1395050, 'name': PowerOnVM_Task, 'duration_secs': 1.699515} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.561119] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1003.561308] env[61978]: INFO nova.compute.manager [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Took 10.04 seconds to spawn the instance on the hypervisor. [ 1003.561482] env[61978]: DEBUG nova.compute.manager [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1003.562556] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ac3fff-512a-4deb-bd74-29c40510462c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.641571] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395054, 'name': ReconfigVM_Task, 'duration_secs': 0.370501} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.641878] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1003.642162] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a316747f-4575-4561-bf38-00d1864be729 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.649469] env[61978]: DEBUG nova.compute.manager [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1003.651244] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fb56901-4e59-4fad-8a9c-1dd1c86b5a75 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.663158] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1003.663158] env[61978]: value = "task-1395059" [ 1003.663158] env[61978]: _type = "Task" [ 1003.663158] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.696594] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395059, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.711043] env[61978]: DEBUG oslo_vmware.api [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395056, 'name': Destroy_Task} progress is 33%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.720078] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395055, 'name': Rename_Task, 'duration_secs': 0.262663} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.722043] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1003.722043] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d0d0b4f0-d04b-4a6d-9331-81ac21a9bfc5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.732663] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 1003.732663] env[61978]: value = "task-1395060" [ 1003.732663] env[61978]: _type = "Task" [ 1003.732663] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.748830] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395060, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.829855] env[61978]: DEBUG oslo_vmware.api [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Task: {'id': task-1395058, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.370077} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.830286] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1003.830587] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1003.830805] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1003.831267] env[61978]: INFO nova.compute.manager [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Took 1.29 seconds to destroy the instance on the hypervisor. [ 1003.831267] env[61978]: DEBUG oslo.service.loopingcall [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1003.832107] env[61978]: DEBUG nova.compute.manager [-] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1003.832204] env[61978]: DEBUG nova.network.neutron [-] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1004.033623] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc9fc50-261e-4efe-8ca4-0d939f419911 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.042640] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e8b570b-4076-4d54-a035-8fd57bfff3aa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.082284] env[61978]: DEBUG nova.network.neutron [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1004.084725] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05caaa90-27a8-45d3-9a02-f375b3f18bb1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.092961] env[61978]: INFO nova.compute.manager [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Took 40.04 seconds to build instance. [ 1004.098754] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7cb3b0d-e4b3-4960-956f-3f964dfe8c08 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.116844] env[61978]: DEBUG nova.compute.provider_tree [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.176214] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395059, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.179448] env[61978]: INFO nova.compute.manager [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] instance snapshotting [ 1004.188020] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec84f52-ca74-472c-a7d2-10c89049085d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.209322] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db0c140c-385f-460f-9f65-3bba13b7393f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.218749] env[61978]: DEBUG oslo_vmware.api [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395056, 'name': Destroy_Task, 'duration_secs': 0.677569} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.223163] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Destroyed the VM [ 1004.223163] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Deleting Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1004.228937] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-41192d77-06dc-48a3-baf7-c849e7f0e011 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.238177] env[61978]: DEBUG oslo_vmware.api [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1004.238177] env[61978]: value = "task-1395061" [ 1004.238177] env[61978]: _type = "Task" [ 1004.238177] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.256662] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395060, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.260998] env[61978]: DEBUG oslo_vmware.api [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395061, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.322129] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ceacf547-4bea-4a4c-923d-8c350d3e2105 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "96bef3f3-a45c-43ba-a86a-66c1d5686ea6" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.393s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.407814] env[61978]: DEBUG nova.compute.manager [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1004.433742] env[61978]: DEBUG nova.network.neutron [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Updating instance_info_cache with network_info: [{"id": "1682c3e8-c35b-4055-90d6-a236d4439ee1", "address": "fa:16:3e:ba:a2:f0", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1682c3e8-c3", "ovs_interfaceid": "1682c3e8-c35b-4055-90d6-a236d4439ee1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.451668] env[61978]: DEBUG nova.virt.hardware [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1004.451967] env[61978]: DEBUG nova.virt.hardware [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1004.452243] env[61978]: DEBUG nova.virt.hardware [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1004.452466] env[61978]: DEBUG nova.virt.hardware [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 
tempest-AttachInterfacesV270Test-558254141-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1004.452650] env[61978]: DEBUG nova.virt.hardware [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1004.452858] env[61978]: DEBUG nova.virt.hardware [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1004.453129] env[61978]: DEBUG nova.virt.hardware [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1004.453345] env[61978]: DEBUG nova.virt.hardware [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1004.453564] env[61978]: DEBUG nova.virt.hardware [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1004.453808] env[61978]: DEBUG nova.virt.hardware [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1004.454044] env[61978]: DEBUG nova.virt.hardware [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1004.457758] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3289a41d-fe43-429d-9cb7-629303e6e89e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.472644] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-173cbaa3-f842-46e8-80b5-07cc57114ee3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.479183] env[61978]: DEBUG nova.network.neutron [None req-93e8f5b1-bc76-4ca5-9fc3-8ec0869bdc89 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Updating instance_info_cache with network_info: [{"id": "4bd8d0bd-32e6-47a0-9308-f8aebe253aa4", "address": "fa:16:3e:88:b3:ad", "network": 
{"id": "4a12b89f-0910-460a-8694-c424a051b6c6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-917593776-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df79d3305e464a6b83f18497a2464140", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bd8d0bd-32", "ovs_interfaceid": "4bd8d0bd-32e6-47a0-9308-f8aebe253aa4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.596962] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cb36f1c5-5708-4c1c-a5b3-e2b88c457b1e tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Lock "2c1ce021-255f-454d-ba0e-c85380f3e973" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.652s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.620426] env[61978]: DEBUG nova.scheduler.client.report [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1004.679082] env[61978]: DEBUG oslo_vmware.api [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395059, 'name': PowerOnVM_Task, 'duration_secs': 0.696093} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.679590] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1004.683712] env[61978]: DEBUG nova.compute.manager [None req-d6c52b3d-301b-4771-aa81-a265d5e5bc35 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1004.685143] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec821f43-1336-45e7-8243-fc0f8d860aac {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.729873] env[61978]: DEBUG nova.network.neutron [-] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.735030] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Creating Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1004.735030] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5081524b-0ede-446d-912f-a55be4e2b62b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.757135] env[61978]: DEBUG oslo_vmware.api [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 1004.757135] env[61978]: value = "task-1395062" [ 1004.757135] env[61978]: _type = "Task" [ 1004.757135] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.765023] env[61978]: DEBUG oslo_vmware.api [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395060, 'name': PowerOnVM_Task, 'duration_secs': 0.858022} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.771335] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1004.771744] env[61978]: DEBUG nova.compute.manager [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1004.778470] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50620600-277a-431b-aa28-3ab022ec47fd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.783334] env[61978]: DEBUG oslo_vmware.api [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395061, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.795776] env[61978]: DEBUG oslo_vmware.api [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395062, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.796213] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Acquiring lock "2c1ce021-255f-454d-ba0e-c85380f3e973" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.796460] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Lock "2c1ce021-255f-454d-ba0e-c85380f3e973" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.796692] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Acquiring lock "2c1ce021-255f-454d-ba0e-c85380f3e973-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.796913] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Lock "2c1ce021-255f-454d-ba0e-c85380f3e973-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.797108] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Lock "2c1ce021-255f-454d-ba0e-c85380f3e973-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.800599] env[61978]: INFO nova.compute.manager [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Terminating instance [ 1004.802294] env[61978]: DEBUG nova.compute.manager [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1004.804269] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1004.805823] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57b0326a-3b23-4878-8bee-3122b965718d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.816350] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1004.816350] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-10e305bc-f0c4-4c68-8e36-87741eb8c797 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.826147] env[61978]: DEBUG oslo_vmware.api [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Waiting for the task: (returnval){ [ 1004.826147] env[61978]: value = "task-1395063" [ 1004.826147] env[61978]: _type = "Task" [ 1004.826147] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.836547] env[61978]: DEBUG oslo_vmware.api [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': task-1395063, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.938343] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "refresh_cache-8f609401-af09-4291-a1e7-a356fbc4aac9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.938599] env[61978]: DEBUG nova.compute.manager [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Instance network_info: |[{"id": "1682c3e8-c35b-4055-90d6-a236d4439ee1", "address": "fa:16:3e:ba:a2:f0", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1682c3e8-c3", "ovs_interfaceid": "1682c3e8-c35b-4055-90d6-a236d4439ee1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1004.939528] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ba:a2:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ed3ffc1d-9f86-4029-857e-6cd1d383edbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1682c3e8-c35b-4055-90d6-a236d4439ee1', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1004.950728] env[61978]: DEBUG oslo.service.loopingcall [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1004.952401] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1004.953765] env[61978]: DEBUG nova.compute.manager [req-4504e6d9-9a68-46de-b75b-c855cc14f84f req-c61f0345-f0b4-4d42-bf69-e7c052e1cc6a service nova] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Received event network-vif-plugged-948f3af3-d436-4415-b7d8-edefe3d32c25 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1004.953911] env[61978]: DEBUG oslo_concurrency.lockutils [req-4504e6d9-9a68-46de-b75b-c855cc14f84f req-c61f0345-f0b4-4d42-bf69-e7c052e1cc6a service nova] Acquiring lock "aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.954138] env[61978]: DEBUG oslo_concurrency.lockutils [req-4504e6d9-9a68-46de-b75b-c855cc14f84f req-c61f0345-f0b4-4d42-bf69-e7c052e1cc6a service nova] Lock "aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.954466] env[61978]: DEBUG oslo_concurrency.lockutils [req-4504e6d9-9a68-46de-b75b-c855cc14f84f req-c61f0345-f0b4-4d42-bf69-e7c052e1cc6a service nova] Lock "aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.954615] env[61978]: DEBUG nova.compute.manager [req-4504e6d9-9a68-46de-b75b-c855cc14f84f req-c61f0345-f0b4-4d42-bf69-e7c052e1cc6a service nova] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] No waiting events found dispatching network-vif-plugged-948f3af3-d436-4415-b7d8-edefe3d32c25 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1004.954894] env[61978]: WARNING nova.compute.manager [req-4504e6d9-9a68-46de-b75b-c855cc14f84f req-c61f0345-f0b4-4d42-bf69-e7c052e1cc6a service nova] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Received unexpected event network-vif-plugged-948f3af3-d436-4415-b7d8-edefe3d32c25 for instance with vm_state building and task_state spawning. 
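The "Invoking Folder.CreateVM_Task ..." and "Task: {'id': task-..., ...} progress is N%" entries above follow oslo.vmware's invoke_api / wait_for_task pattern: a vCenter task is started through the session and then polled until completion. A minimal sketch under placeholder assumptions (the endpoint, credentials, and vm_ref are illustrative, not values from this deployment):

    from oslo_vmware import api as vmware_api

    # Placeholder endpoint and credentials; create_session=False keeps this
    # sketch from logging in to a server at construction time.
    session = vmware_api.VMwareAPISession(
        "vcenter.example.org", "admin", "secret",
        10,    # api_retry_count
        0.5,   # task_poll_interval (seconds) -- drives the _poll_task cadence
        create_session=False)

    # vm_ref would be a VirtualMachine managed-object reference obtained from
    # an earlier property-collector query; shown commented out because this
    # sketch has no live vCenter to call.
    # task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    # session.wait_for_task(task)  # polls and logs "progress is N%" until done

wait_for_task is what produces the repeated "progress is 0% / 33% / 88%" lines and the final "completed successfully" entry for each task id seen in this log.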
[ 1004.955897] env[61978]: DEBUG nova.network.neutron [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Successfully updated port: 948f3af3-d436-4415-b7d8-edefe3d32c25 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1004.957047] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2427c557-3754-4667-9568-1621ad2a0a90 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.981572] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Acquiring lock "refresh_cache-aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.981752] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Acquired lock "refresh_cache-aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.981854] env[61978]: DEBUG nova.network.neutron [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1004.983281] env[61978]: DEBUG oslo_concurrency.lockutils [None req-93e8f5b1-bc76-4ca5-9fc3-8ec0869bdc89 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Releasing lock "refresh_cache-f3c837fb-be7e-40a6-aae4-7f213c62ab2c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.993349] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1004.993349] env[61978]: value = "task-1395064" [ 1004.993349] env[61978]: _type = "Task" [ 1004.993349] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.008241] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395064, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.126860] env[61978]: DEBUG oslo_concurrency.lockutils [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.742s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.127514] env[61978]: DEBUG nova.compute.manager [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1005.130849] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.710s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.132652] env[61978]: INFO nova.compute.claims [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1005.154090] env[61978]: DEBUG nova.compute.manager [req-408cda63-9b05-4c86-9c0d-ee6136744dee req-c39d2b99-e8dd-40a2-8458-b6ed6c7a5a84 service nova] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Received event network-changed-1682c3e8-c35b-4055-90d6-a236d4439ee1 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1005.154090] env[61978]: DEBUG nova.compute.manager [req-408cda63-9b05-4c86-9c0d-ee6136744dee req-c39d2b99-e8dd-40a2-8458-b6ed6c7a5a84 service nova] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Refreshing instance network info cache due to event network-changed-1682c3e8-c35b-4055-90d6-a236d4439ee1. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1005.154719] env[61978]: DEBUG oslo_concurrency.lockutils [req-408cda63-9b05-4c86-9c0d-ee6136744dee req-c39d2b99-e8dd-40a2-8458-b6ed6c7a5a84 service nova] Acquiring lock "refresh_cache-8f609401-af09-4291-a1e7-a356fbc4aac9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.155034] env[61978]: DEBUG oslo_concurrency.lockutils [req-408cda63-9b05-4c86-9c0d-ee6136744dee req-c39d2b99-e8dd-40a2-8458-b6ed6c7a5a84 service nova] Acquired lock "refresh_cache-8f609401-af09-4291-a1e7-a356fbc4aac9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.155235] env[61978]: DEBUG nova.network.neutron [req-408cda63-9b05-4c86-9c0d-ee6136744dee req-c39d2b99-e8dd-40a2-8458-b6ed6c7a5a84 service nova] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Refreshing network info cache for port 1682c3e8-c35b-4055-90d6-a236d4439ee1 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1005.236046] env[61978]: INFO nova.compute.manager [-] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Took 1.40 seconds to deallocate network for instance. [ 1005.260377] env[61978]: DEBUG oslo_vmware.api [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395061, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.274947] env[61978]: DEBUG oslo_vmware.api [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395062, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.306959] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.340029] env[61978]: DEBUG oslo_vmware.api [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': task-1395063, 'name': PowerOffVM_Task, 'duration_secs': 0.344628} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.340029] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1005.340506] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1005.344611] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-177b0596-8962-4dd5-8647-13f7591926ee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.425136] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1005.425390] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1005.425587] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Deleting the datastore file [datastore1] 2c1ce021-255f-454d-ba0e-c85380f3e973 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1005.425873] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05f0e389-0cc5-44f5-a023-122b6ebf75e5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.433341] env[61978]: DEBUG oslo_vmware.api [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Waiting for the task: (returnval){ [ 1005.433341] env[61978]: value = "task-1395066" [ 1005.433341] env[61978]: 
_type = "Task" [ 1005.433341] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.443091] env[61978]: DEBUG oslo_vmware.api [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': task-1395066, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.487200] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-93e8f5b1-bc76-4ca5-9fc3-8ec0869bdc89 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1005.490991] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7487d6fa-8707-4a62-941e-6d2a0b330833 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.499532] env[61978]: DEBUG oslo_vmware.api [None req-93e8f5b1-bc76-4ca5-9fc3-8ec0869bdc89 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1005.499532] env[61978]: value = "task-1395067" [ 1005.499532] env[61978]: _type = "Task" [ 1005.499532] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.503157] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395064, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.511535] env[61978]: DEBUG oslo_vmware.api [None req-93e8f5b1-bc76-4ca5-9fc3-8ec0869bdc89 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395067, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.522244] env[61978]: DEBUG nova.network.neutron [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1005.637692] env[61978]: DEBUG nova.compute.utils [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1005.641385] env[61978]: DEBUG nova.compute.manager [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1005.642012] env[61978]: DEBUG nova.network.neutron [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1005.702515] env[61978]: DEBUG nova.network.neutron [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Updating instance_info_cache with network_info: [{"id": "948f3af3-d436-4415-b7d8-edefe3d32c25", "address": "fa:16:3e:2e:b6:35", "network": {"id": "cb19d68e-470c-4933-8624-90b010acd9fd", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-585840747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db23883478c54c51a786e66bb57aa17d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ace50835-5731-4c77-b6c0-3076d7b4aa21", "external-id": "nsx-vlan-transportzone-270", "segmentation_id": 270, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap948f3af3-d4", "ovs_interfaceid": "948f3af3-d436-4415-b7d8-edefe3d32c25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.743023] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.744859] env[61978]: DEBUG nova.policy [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd9c731900ec349ab945b14932491dc7a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'add5612301884f668bbe80681629e8d5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1005.761374] env[61978]: DEBUG oslo_vmware.api [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395061, 'name': RemoveSnapshot_Task, 'duration_secs': 1.09322} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.761692] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Deleted Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1005.764100] env[61978]: INFO nova.compute.manager [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Took 17.03 seconds to snapshot the instance on the hypervisor. [ 1005.775961] env[61978]: DEBUG oslo_vmware.api [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395062, 'name': CreateSnapshot_Task, 'duration_secs': 0.943871} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.776345] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Created Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1005.777234] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce143da1-8c0f-49ab-8eb0-d1545336e089 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.945700] env[61978]: DEBUG oslo_vmware.api [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Task: {'id': task-1395066, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.380743} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.946017] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1005.946229] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1005.946429] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1005.946680] env[61978]: INFO nova.compute.manager [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1005.947036] env[61978]: DEBUG oslo.service.loopingcall [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1005.947302] env[61978]: DEBUG nova.compute.manager [-] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1005.947443] env[61978]: DEBUG nova.network.neutron [-] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1006.005326] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395064, 'name': CreateVM_Task, 'duration_secs': 0.527176} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.008369] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1006.009129] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.009496] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.009673] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1006.010346] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51e75936-06e7-40e2-946d-9859922f2073 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.016594] env[61978]: DEBUG oslo_vmware.api [None req-93e8f5b1-bc76-4ca5-9fc3-8ec0869bdc89 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395067, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.018024] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1006.018024] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52714518-9c6a-c323-327d-0e9100139d40" [ 1006.018024] env[61978]: _type = "Task" [ 1006.018024] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.032583] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52714518-9c6a-c323-327d-0e9100139d40, 'name': SearchDatastore_Task, 'duration_secs': 0.01232} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.032922] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.033194] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1006.033677] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.033677] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.033783] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1006.034061] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a72244f6-0d2e-4f08-9210-1eff28f47a99 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.043058] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1006.043214] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1006.043947] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37f11490-be54-48d5-af96-570528df2bab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.049649] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1006.049649] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a9ba36-52a6-e8f3-534c-c7c48bc082ee" [ 1006.049649] env[61978]: _type = "Task" [ 1006.049649] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.057965] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a9ba36-52a6-e8f3-534c-c7c48bc082ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.072611] env[61978]: DEBUG nova.network.neutron [req-408cda63-9b05-4c86-9c0d-ee6136744dee req-c39d2b99-e8dd-40a2-8458-b6ed6c7a5a84 service nova] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Updated VIF entry in instance network info cache for port 1682c3e8-c35b-4055-90d6-a236d4439ee1. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1006.073174] env[61978]: DEBUG nova.network.neutron [req-408cda63-9b05-4c86-9c0d-ee6136744dee req-c39d2b99-e8dd-40a2-8458-b6ed6c7a5a84 service nova] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Updating instance_info_cache with network_info: [{"id": "1682c3e8-c35b-4055-90d6-a236d4439ee1", "address": "fa:16:3e:ba:a2:f0", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1682c3e8-c3", "ovs_interfaceid": "1682c3e8-c35b-4055-90d6-a236d4439ee1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.143768] env[61978]: DEBUG nova.compute.utils [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:238}} [ 1006.205762] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Releasing lock "refresh_cache-aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.206285] env[61978]: DEBUG nova.compute.manager [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Instance network_info: |[{"id": "948f3af3-d436-4415-b7d8-edefe3d32c25", "address": "fa:16:3e:2e:b6:35", "network": {"id": "cb19d68e-470c-4933-8624-90b010acd9fd", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-585840747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db23883478c54c51a786e66bb57aa17d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ace50835-5731-4c77-b6c0-3076d7b4aa21", "external-id": "nsx-vlan-transportzone-270", "segmentation_id": 270, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap948f3af3-d4", "ovs_interfaceid": "948f3af3-d436-4415-b7d8-edefe3d32c25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1006.207276] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:b6:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ace50835-5731-4c77-b6c0-3076d7b4aa21', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '948f3af3-d436-4415-b7d8-edefe3d32c25', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1006.216576] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Creating folder: Project (db23883478c54c51a786e66bb57aa17d). Parent ref: group-v295764. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1006.216994] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9b3e8b7-a4a7-4de2-9c9b-c2b4cdfdccf3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.236642] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Created folder: Project (db23883478c54c51a786e66bb57aa17d) in parent group-v295764. [ 1006.236642] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Creating folder: Instances. Parent ref: group-v295902. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1006.236860] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e46159e5-de8c-4413-b51d-87cbbe9ad2d7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.255781] env[61978]: DEBUG oslo_concurrency.lockutils [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquiring lock "3ee1023c-7837-4db0-88d4-f88c9a43fba3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.256097] env[61978]: DEBUG oslo_concurrency.lockutils [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "3ee1023c-7837-4db0-88d4-f88c9a43fba3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.257632] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Created folder: Instances in parent group-v295902. [ 1006.257833] env[61978]: DEBUG oslo.service.loopingcall [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1006.258416] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1006.261095] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4734e772-701f-4e72-8c55-a8fb77138ac8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.287851] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1006.287851] env[61978]: value = "task-1395070" [ 1006.287851] env[61978]: _type = "Task" [ 1006.287851] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.300409] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Creating linked-clone VM from snapshot {{(pid=61978) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1006.301712] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-524e46bd-a682-467f-9e47-73ccabcb039e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.308907] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395070, 'name': CreateVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.315265] env[61978]: DEBUG oslo_vmware.api [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 1006.315265] env[61978]: value = "task-1395071" [ 1006.315265] env[61978]: _type = "Task" [ 1006.315265] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.324684] env[61978]: DEBUG nova.network.neutron [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Successfully created port: 6324dacc-b741-4de5-8ded-34326888d25f {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1006.335416] env[61978]: DEBUG oslo_vmware.api [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395071, 'name': CloneVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.354944] env[61978]: DEBUG nova.compute.manager [None req-44fdb4ed-9467-433b-b4b1-187bbda76c68 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Found 1 images (rotation: 2) {{(pid=61978) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 1006.371937] env[61978]: INFO nova.compute.manager [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Unrescuing [ 1006.372262] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "refresh_cache-8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.372378] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquired lock "refresh_cache-8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.372546] env[61978]: DEBUG nova.network.neutron [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1006.519226] env[61978]: DEBUG oslo_vmware.api [None req-93e8f5b1-bc76-4ca5-9fc3-8ec0869bdc89 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395067, 'name': PowerOnVM_Task, 'duration_secs': 0.693474} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.523461] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-93e8f5b1-bc76-4ca5-9fc3-8ec0869bdc89 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1006.523750] env[61978]: DEBUG nova.compute.manager [None req-93e8f5b1-bc76-4ca5-9fc3-8ec0869bdc89 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1006.525055] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492098fd-037a-4fdf-8a7d-5c4062d455c4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.567168] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a9ba36-52a6-e8f3-534c-c7c48bc082ee, 'name': SearchDatastore_Task, 'duration_secs': 0.018301} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.571858] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b75a7d05-1443-4cc3-a7fb-f497bea88edb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.575869] env[61978]: DEBUG oslo_concurrency.lockutils [req-408cda63-9b05-4c86-9c0d-ee6136744dee req-c39d2b99-e8dd-40a2-8458-b6ed6c7a5a84 service nova] Releasing lock "refresh_cache-8f609401-af09-4291-a1e7-a356fbc4aac9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.576259] env[61978]: DEBUG nova.compute.manager [req-408cda63-9b05-4c86-9c0d-ee6136744dee req-c39d2b99-e8dd-40a2-8458-b6ed6c7a5a84 service nova] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Received event network-vif-deleted-3dba37f4-66d3-4de6-b597-7ea0b2a0221c {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1006.581181] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1006.581181] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e0faf3-c461-e305-89c2-8de274b881b4" [ 1006.581181] env[61978]: _type = "Task" [ 1006.581181] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.594114] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e0faf3-c461-e305-89c2-8de274b881b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.645649] env[61978]: DEBUG nova.compute.manager [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1006.762827] env[61978]: DEBUG nova.compute.manager [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1006.814144] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395070, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.829743] env[61978]: DEBUG oslo_vmware.api [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395071, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.859292] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97875485-0ce1-4073-8a21-059cb6326c05 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.867198] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ebbf22-1374-478f-a827-2a141d46d059 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.904244] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e74a1939-504d-41cb-81ae-281e37a7c2b4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.913864] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85d659d7-e8aa-4b17-a3b3-7bc29bac6238 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.930758] env[61978]: DEBUG nova.network.neutron [-] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.933977] env[61978]: DEBUG nova.compute.provider_tree [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1007.093252] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e0faf3-c461-e305-89c2-8de274b881b4, 'name': SearchDatastore_Task, 'duration_secs': 0.017447} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.093538] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.093800] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 8f609401-af09-4291-a1e7-a356fbc4aac9/8f609401-af09-4291-a1e7-a356fbc4aac9.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1007.094086] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07e66a48-69ec-47e2-8f28-54b37ed902d3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.102905] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1007.102905] env[61978]: value = "task-1395072" [ 1007.102905] env[61978]: _type = "Task" [ 1007.102905] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.115619] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395072, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.149559] env[61978]: DEBUG nova.network.neutron [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Updating instance_info_cache with network_info: [{"id": "461cf97f-d4c1-4a04-bc0f-ea10c52ecce3", "address": "fa:16:3e:33:87:13", "network": {"id": "3e60c5f0-d590-4a22-a8bf-c0356ac4deb4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1865077723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86f4ae0b29af4ee2b33e5a499cf1e899", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap461cf97f-d4", "ovs_interfaceid": "461cf97f-d4c1-4a04-bc0f-ea10c52ecce3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.246310] env[61978]: DEBUG nova.compute.manager [req-336341b1-3473-4fc5-bc1c-48b56d84432d req-dd9bd75b-4838-4435-bab8-40f6b496e567 service nova] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Received event network-changed-948f3af3-d436-4415-b7d8-edefe3d32c25 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1007.246839] env[61978]: DEBUG nova.compute.manager [req-336341b1-3473-4fc5-bc1c-48b56d84432d req-dd9bd75b-4838-4435-bab8-40f6b496e567 service nova] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Refreshing instance network info cache due to event network-changed-948f3af3-d436-4415-b7d8-edefe3d32c25. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1007.246839] env[61978]: DEBUG oslo_concurrency.lockutils [req-336341b1-3473-4fc5-bc1c-48b56d84432d req-dd9bd75b-4838-4435-bab8-40f6b496e567 service nova] Acquiring lock "refresh_cache-aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.247273] env[61978]: DEBUG oslo_concurrency.lockutils [req-336341b1-3473-4fc5-bc1c-48b56d84432d req-dd9bd75b-4838-4435-bab8-40f6b496e567 service nova] Acquired lock "refresh_cache-aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.247273] env[61978]: DEBUG nova.network.neutron [req-336341b1-3473-4fc5-bc1c-48b56d84432d req-dd9bd75b-4838-4435-bab8-40f6b496e567 service nova] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Refreshing network info cache for port 948f3af3-d436-4415-b7d8-edefe3d32c25 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1007.289013] env[61978]: DEBUG oslo_concurrency.lockutils [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.300154] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395070, 'name': CreateVM_Task, 'duration_secs': 0.562365} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.300226] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1007.300961] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.301179] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.301556] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1007.301841] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1820a19b-9973-470f-ad24-102a70935669 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.307319] env[61978]: DEBUG oslo_vmware.api 
[None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Waiting for the task: (returnval){ [ 1007.307319] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5295e435-e1ba-f691-a4d4-7fbd4ff32247" [ 1007.307319] env[61978]: _type = "Task" [ 1007.307319] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.316668] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5295e435-e1ba-f691-a4d4-7fbd4ff32247, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.324871] env[61978]: DEBUG oslo_vmware.api [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395071, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.437028] env[61978]: DEBUG nova.scheduler.client.report [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1007.441041] env[61978]: INFO nova.compute.manager [-] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Took 1.49 seconds to deallocate network for instance. [ 1007.614918] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395072, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.655816] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Releasing lock "refresh_cache-8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.656923] env[61978]: DEBUG nova.objects.instance [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lazy-loading 'flavor' on Instance uuid 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1007.659357] env[61978]: DEBUG nova.compute.manager [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1007.676300] env[61978]: DEBUG nova.compute.manager [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1007.677550] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b6a4159-68d3-435d-9d46-d48b71b3a19a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.694645] env[61978]: DEBUG nova.virt.hardware [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T15:03:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1957882391',id=30,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-722575268',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1007.695091] env[61978]: DEBUG nova.virt.hardware [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1007.695337] env[61978]: DEBUG nova.virt.hardware [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 
tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1007.695555] env[61978]: DEBUG nova.virt.hardware [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1007.695716] env[61978]: DEBUG nova.virt.hardware [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1007.695894] env[61978]: DEBUG nova.virt.hardware [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1007.696252] env[61978]: DEBUG nova.virt.hardware [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1007.696439] env[61978]: DEBUG nova.virt.hardware [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1007.696672] env[61978]: DEBUG nova.virt.hardware [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1007.697571] env[61978]: DEBUG nova.virt.hardware [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1007.697571] env[61978]: DEBUG nova.virt.hardware [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1007.698359] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4337ba0-6719-478e-90be-e1a82d5c6df8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.708769] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca80ecd8-dbae-488b-b438-15456e60b1a9 {{(pid=61978) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.820385] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5295e435-e1ba-f691-a4d4-7fbd4ff32247, 'name': SearchDatastore_Task, 'duration_secs': 0.013615} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.824845] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.825439] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1007.825844] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.826150] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.828018] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1007.828018] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12a2df5e-9d75-49b0-a772-f408af056611 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.835441] env[61978]: DEBUG oslo_vmware.api [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395071, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.840977] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1007.840977] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1007.840977] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79573449-db99-4ee5-b5b8-307aced3fc20 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.850021] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Waiting for the task: (returnval){ [ 1007.850021] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52de22fe-49bc-5a8b-dcbc-18c8cb86cce0" [ 1007.850021] env[61978]: _type = "Task" [ 1007.850021] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.860550] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52de22fe-49bc-5a8b-dcbc-18c8cb86cce0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.907564] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "8a21e6a7-c34e-4af0-b1fd-8a501694614c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.907564] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "8a21e6a7-c34e-4af0-b1fd-8a501694614c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.907564] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "8a21e6a7-c34e-4af0-b1fd-8a501694614c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.907564] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "8a21e6a7-c34e-4af0-b1fd-8a501694614c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.907877] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "8a21e6a7-c34e-4af0-b1fd-8a501694614c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.914804] env[61978]: INFO nova.compute.manager [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Terminating instance [ 1007.917303] env[61978]: DEBUG nova.compute.manager [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1007.917303] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1007.920105] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-575c3f93-8ecf-4f10-be07-ac61d0806465 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.927903] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1007.929349] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0cb72351-3735-4685-8fe9-5c6bd36db4a0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.938224] env[61978]: DEBUG oslo_vmware.api [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 1007.938224] env[61978]: value = "task-1395073" [ 1007.938224] env[61978]: _type = "Task" [ 1007.938224] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.943769] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.813s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.945087] env[61978]: DEBUG nova.compute.manager [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1007.953941] env[61978]: DEBUG oslo_concurrency.lockutils [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.435s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.954237] env[61978]: DEBUG oslo_concurrency.lockutils [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.958383] env[61978]: DEBUG oslo_concurrency.lockutils [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.251s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.958725] env[61978]: DEBUG oslo_concurrency.lockutils [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.961327] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.154s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.961742] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.964466] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 20.783s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.964873] env[61978]: DEBUG nova.objects.instance [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61978) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1007.970957] env[61978]: DEBUG 
oslo_concurrency.lockutils [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.980337] env[61978]: DEBUG oslo_vmware.api [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395073, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.000909] env[61978]: INFO nova.scheduler.client.report [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Deleted allocations for instance 32bcb974-8db9-43e2-b397-b497f3a4f30c [ 1008.007086] env[61978]: INFO nova.scheduler.client.report [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Deleted allocations for instance e30d4a9f-1d75-453c-9552-2a0fbd4aa87d [ 1008.020929] env[61978]: INFO nova.scheduler.client.report [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Deleted allocations for instance ff793464-9bef-449f-8485-36d3b8fb1d69 [ 1008.115350] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395072, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.716492} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.115683] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 8f609401-af09-4291-a1e7-a356fbc4aac9/8f609401-af09-4291-a1e7-a356fbc4aac9.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1008.115993] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1008.116348] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c447d42b-f36c-48f0-a5d7-64189ef52421 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.126049] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1008.126049] env[61978]: value = "task-1395074" [ 1008.126049] env[61978]: _type = "Task" [ 1008.126049] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.134629] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395074, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.166462] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ba2cb6-71bb-4f5b-a1f6-074db9062ce0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.189358] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1008.189775] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de3996d1-5ec9-4e78-b3f6-1569e250c3ac {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.195323] env[61978]: INFO nova.compute.manager [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] instance snapshotting [ 1008.196038] env[61978]: DEBUG nova.objects.instance [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lazy-loading 'flavor' on Instance uuid c17c986e-c008-4414-8dd1-4ea836458048 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1008.203651] env[61978]: DEBUG oslo_vmware.api [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1008.203651] env[61978]: value = "task-1395075" [ 1008.203651] env[61978]: _type = "Task" [ 1008.203651] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.219791] env[61978]: DEBUG oslo_vmware.api [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395075, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.303662] env[61978]: DEBUG nova.network.neutron [req-336341b1-3473-4fc5-bc1c-48b56d84432d req-dd9bd75b-4838-4435-bab8-40f6b496e567 service nova] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Updated VIF entry in instance network info cache for port 948f3af3-d436-4415-b7d8-edefe3d32c25. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1008.304106] env[61978]: DEBUG nova.network.neutron [req-336341b1-3473-4fc5-bc1c-48b56d84432d req-dd9bd75b-4838-4435-bab8-40f6b496e567 service nova] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Updating instance_info_cache with network_info: [{"id": "948f3af3-d436-4415-b7d8-edefe3d32c25", "address": "fa:16:3e:2e:b6:35", "network": {"id": "cb19d68e-470c-4933-8624-90b010acd9fd", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-585840747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db23883478c54c51a786e66bb57aa17d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ace50835-5731-4c77-b6c0-3076d7b4aa21", "external-id": "nsx-vlan-transportzone-270", "segmentation_id": 270, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap948f3af3-d4", "ovs_interfaceid": "948f3af3-d436-4415-b7d8-edefe3d32c25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.332961] env[61978]: DEBUG oslo_vmware.api [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395071, 'name': CloneVM_Task} progress is 95%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.353081] env[61978]: DEBUG nova.network.neutron [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Successfully updated port: 6324dacc-b741-4de5-8ded-34326888d25f {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1008.361210] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52de22fe-49bc-5a8b-dcbc-18c8cb86cce0, 'name': SearchDatastore_Task, 'duration_secs': 0.018635} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.362073] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-229e7f52-0301-4e74-b8cc-beebb98782c1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.369103] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Waiting for the task: (returnval){ [ 1008.369103] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5288708d-b803-7a81-e20e-c5570479b5fd" [ 1008.369103] env[61978]: _type = "Task" [ 1008.369103] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.381014] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5288708d-b803-7a81-e20e-c5570479b5fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.449098] env[61978]: DEBUG oslo_vmware.api [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395073, 'name': PowerOffVM_Task, 'duration_secs': 0.367371} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.449343] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1008.449589] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1008.449922] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a50f707-de56-43f9-b351-ec453bf22b22 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.455779] env[61978]: DEBUG nova.compute.utils [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1008.457195] env[61978]: DEBUG nova.compute.manager [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1008.457450] env[61978]: DEBUG nova.network.neutron [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1008.500441] env[61978]: DEBUG nova.policy [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '394d03fc54234c369ad2e1255eee9c82', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c31ffdd4e70d40ecbbb56777f9422a52', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1008.515544] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b544c36-775f-49d1-8002-47332cfeb0c8 tempest-ServersTestManualDisk-269369081 tempest-ServersTestManualDisk-269369081-project-member] Lock "32bcb974-8db9-43e2-b397-b497f3a4f30c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.413s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.520907] env[61978]: DEBUG oslo_concurrency.lockutils [None req-15e061c0-ea3d-49dc-9d45-92b49b5b657e tempest-ServersTestBootFromVolume-119327357 tempest-ServersTestBootFromVolume-119327357-project-member] Lock "e30d4a9f-1d75-453c-9552-2a0fbd4aa87d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.541s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.530506] env[61978]: DEBUG oslo_concurrency.lockutils [None req-11ed0059-5ebe-41ed-8578-1b5bd9cd49e3 tempest-ServersV294TestFqdnHostnames-632964716 tempest-ServersV294TestFqdnHostnames-632964716-project-member] Lock "ff793464-9bef-449f-8485-36d3b8fb1d69" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.599s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.558275] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1008.558530] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1008.558703] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Deleting the datastore file [datastore2] 
8a21e6a7-c34e-4af0-b1fd-8a501694614c {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1008.559339] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6984a69a-61eb-439e-9c70-4714dcf5cb18 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.567993] env[61978]: DEBUG oslo_vmware.api [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 1008.567993] env[61978]: value = "task-1395077" [ 1008.567993] env[61978]: _type = "Task" [ 1008.567993] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.577559] env[61978]: DEBUG oslo_vmware.api [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395077, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.635639] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395074, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.160376} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.635980] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1008.636778] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8370e538-b31d-44ed-aaca-5fe60aac9dd3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.659637] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] 8f609401-af09-4291-a1e7-a356fbc4aac9/8f609401-af09-4291-a1e7-a356fbc4aac9.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1008.659949] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-708baa2f-9615-48c1-a266-635a955c27be {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.680692] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1008.680692] env[61978]: value = "task-1395078" [ 1008.680692] env[61978]: _type = "Task" [ 1008.680692] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.689844] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395078, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.715022] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a510f8-3c33-492f-ae43-5b7836c10083 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.720705] env[61978]: DEBUG oslo_vmware.api [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395075, 'name': PowerOffVM_Task, 'duration_secs': 0.282277} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.733815] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1008.739673] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Reconfiguring VM instance instance-00000027 to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1008.740297] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ddcd5fc-aa18-455b-9e77-b155a183d7c5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.758621] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66d5a41-6030-4580-8bc8-f9293595c28c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.773894] env[61978]: DEBUG oslo_vmware.api [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1008.773894] env[61978]: value = "task-1395079" [ 1008.773894] env[61978]: _type = "Task" [ 1008.773894] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.783035] env[61978]: DEBUG oslo_vmware.api [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395079, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.791163] env[61978]: DEBUG nova.network.neutron [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Successfully created port: 46a7e991-f936-4f08-bc8b-1ea0bb74eeb9 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1008.807802] env[61978]: DEBUG oslo_concurrency.lockutils [req-336341b1-3473-4fc5-bc1c-48b56d84432d req-dd9bd75b-4838-4435-bab8-40f6b496e567 service nova] Releasing lock "refresh_cache-aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1008.808135] env[61978]: DEBUG nova.compute.manager [req-336341b1-3473-4fc5-bc1c-48b56d84432d req-dd9bd75b-4838-4435-bab8-40f6b496e567 service nova] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Received event network-vif-deleted-6cc22552-945d-43cc-be37-f57b7f56d3b2 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1008.834548] env[61978]: DEBUG oslo_vmware.api [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395071, 'name': CloneVM_Task, 'duration_secs': 2.319726} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.834821] env[61978]: INFO nova.virt.vmwareapi.vmops [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Created linked-clone VM from snapshot [ 1008.835711] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85e4537-86fe-4887-b1cf-5871bd5a1371 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.843926] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Uploading image da902b14-4829-45fa-a2d7-251587b0567d {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1008.855758] env[61978]: DEBUG oslo_concurrency.lockutils [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquiring lock "refresh_cache-4c7053ee-7c44-49ee-8d30-bf14686c6b1c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.855950] env[61978]: DEBUG oslo_concurrency.lockutils [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquired lock "refresh_cache-4c7053ee-7c44-49ee-8d30-bf14686c6b1c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.856110] env[61978]: DEBUG nova.network.neutron [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 
tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1008.877985] env[61978]: DEBUG oslo_vmware.rw_handles [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1008.877985] env[61978]: value = "vm-295904" [ 1008.877985] env[61978]: _type = "VirtualMachine" [ 1008.877985] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1008.878460] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-84243ce9-dae1-414e-ba41-bbeb8f3f25f8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.883507] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5288708d-b803-7a81-e20e-c5570479b5fd, 'name': SearchDatastore_Task, 'duration_secs': 0.013192} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.884107] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1008.884383] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba/aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1008.884729] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed347483-0254-4028-bdf1-aca438428878 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.889850] env[61978]: DEBUG oslo_vmware.rw_handles [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lease: (returnval){ [ 1008.889850] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]524414d5-b93d-ed5d-2892-5694ca24c181" [ 1008.889850] env[61978]: _type = "HttpNfcLease" [ 1008.889850] env[61978]: } obtained for exporting VM: (result){ [ 1008.889850] env[61978]: value = "vm-295904" [ 1008.889850] env[61978]: _type = "VirtualMachine" [ 1008.889850] env[61978]: }. 
{{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1008.890092] env[61978]: DEBUG oslo_vmware.api [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the lease: (returnval){ [ 1008.890092] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]524414d5-b93d-ed5d-2892-5694ca24c181" [ 1008.890092] env[61978]: _type = "HttpNfcLease" [ 1008.890092] env[61978]: } to be ready. {{(pid=61978) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1008.894508] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Waiting for the task: (returnval){ [ 1008.894508] env[61978]: value = "task-1395081" [ 1008.894508] env[61978]: _type = "Task" [ 1008.894508] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.899693] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1008.899693] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]524414d5-b93d-ed5d-2892-5694ca24c181" [ 1008.899693] env[61978]: _type = "HttpNfcLease" [ 1008.899693] env[61978]: } is initializing. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1008.905347] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': task-1395081, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.963751] env[61978]: DEBUG nova.compute.manager [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1008.979196] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44671a43-99da-41bc-b3dc-43dc3db83ea6 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.979196] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.475s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.980789] env[61978]: INFO nova.compute.claims [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1009.080029] env[61978]: DEBUG oslo_vmware.api [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395077, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.304343} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.080394] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1009.080653] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1009.080811] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1009.080996] env[61978]: INFO nova.compute.manager [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1009.081327] env[61978]: DEBUG oslo.service.loopingcall [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1009.081469] env[61978]: DEBUG nova.compute.manager [-] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1009.081562] env[61978]: DEBUG nova.network.neutron [-] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1009.193633] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395078, 'name': ReconfigVM_Task, 'duration_secs': 0.377764} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.194620] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Reconfigured VM instance instance-00000030 to attach disk [datastore2] 8f609401-af09-4291-a1e7-a356fbc4aac9/8f609401-af09-4291-a1e7-a356fbc4aac9.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1009.195621] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ed258b24-6697-4d18-81fc-28a82bb7001d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.206709] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1009.206709] env[61978]: value = "task-1395082" [ 1009.206709] env[61978]: _type = "Task" [ 1009.206709] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.220875] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395082, 'name': Rename_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.278318] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Creating Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1009.278318] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8c26d591-7098-4060-b6f6-97fc1e828907 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.289405] env[61978]: DEBUG nova.compute.manager [req-8c56f8a6-cf5e-46b6-b6f3-2ba2bc895831 req-9391e343-7f23-4818-96b3-119ea05ca5f7 service nova] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Received event network-vif-plugged-6324dacc-b741-4de5-8ded-34326888d25f {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1009.290292] env[61978]: DEBUG oslo_concurrency.lockutils [req-8c56f8a6-cf5e-46b6-b6f3-2ba2bc895831 req-9391e343-7f23-4818-96b3-119ea05ca5f7 service nova] Acquiring lock "4c7053ee-7c44-49ee-8d30-bf14686c6b1c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.291030] env[61978]: DEBUG oslo_concurrency.lockutils [req-8c56f8a6-cf5e-46b6-b6f3-2ba2bc895831 req-9391e343-7f23-4818-96b3-119ea05ca5f7 service nova] Lock "4c7053ee-7c44-49ee-8d30-bf14686c6b1c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.291030] env[61978]: DEBUG oslo_concurrency.lockutils [req-8c56f8a6-cf5e-46b6-b6f3-2ba2bc895831 req-9391e343-7f23-4818-96b3-119ea05ca5f7 service nova] Lock "4c7053ee-7c44-49ee-8d30-bf14686c6b1c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.291287] env[61978]: DEBUG nova.compute.manager [req-8c56f8a6-cf5e-46b6-b6f3-2ba2bc895831 req-9391e343-7f23-4818-96b3-119ea05ca5f7 service nova] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] No waiting events found dispatching network-vif-plugged-6324dacc-b741-4de5-8ded-34326888d25f {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1009.291705] env[61978]: WARNING nova.compute.manager [req-8c56f8a6-cf5e-46b6-b6f3-2ba2bc895831 req-9391e343-7f23-4818-96b3-119ea05ca5f7 service nova] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Received unexpected event network-vif-plugged-6324dacc-b741-4de5-8ded-34326888d25f for instance with vm_state building and task_state spawning. 
[ 1009.293028] env[61978]: DEBUG nova.compute.manager [req-8c56f8a6-cf5e-46b6-b6f3-2ba2bc895831 req-9391e343-7f23-4818-96b3-119ea05ca5f7 service nova] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Received event network-changed-6324dacc-b741-4de5-8ded-34326888d25f {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1009.293028] env[61978]: DEBUG nova.compute.manager [req-8c56f8a6-cf5e-46b6-b6f3-2ba2bc895831 req-9391e343-7f23-4818-96b3-119ea05ca5f7 service nova] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Refreshing instance network info cache due to event network-changed-6324dacc-b741-4de5-8ded-34326888d25f. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1009.293028] env[61978]: DEBUG oslo_concurrency.lockutils [req-8c56f8a6-cf5e-46b6-b6f3-2ba2bc895831 req-9391e343-7f23-4818-96b3-119ea05ca5f7 service nova] Acquiring lock "refresh_cache-4c7053ee-7c44-49ee-8d30-bf14686c6b1c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.301766] env[61978]: DEBUG oslo_vmware.api [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395079, 'name': ReconfigVM_Task, 'duration_secs': 0.326941} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.303459] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Reconfigured VM instance instance-00000027 to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1009.303723] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1009.304106] env[61978]: DEBUG oslo_vmware.api [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1009.304106] env[61978]: value = "task-1395083" [ 1009.304106] env[61978]: _type = "Task" [ 1009.304106] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.304429] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f4a8115-6314-4963-bcf2-71fc07ff0d48 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.318530] env[61978]: DEBUG oslo_vmware.api [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395083, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.320485] env[61978]: DEBUG oslo_vmware.api [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1009.320485] env[61978]: value = "task-1395084" [ 1009.320485] env[61978]: _type = "Task" [ 1009.320485] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.331176] env[61978]: DEBUG oslo_vmware.api [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395084, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.407842] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1009.407842] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]524414d5-b93d-ed5d-2892-5694ca24c181" [ 1009.407842] env[61978]: _type = "HttpNfcLease" [ 1009.407842] env[61978]: } is ready. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1009.414276] env[61978]: DEBUG oslo_vmware.rw_handles [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1009.414276] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]524414d5-b93d-ed5d-2892-5694ca24c181" [ 1009.414276] env[61978]: _type = "HttpNfcLease" [ 1009.414276] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1009.414276] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': task-1395081, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.415532] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba48fa3-c59b-44bb-8a53-8cc497e8e8cc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.433810] env[61978]: DEBUG oslo_vmware.rw_handles [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f71df3-a943-71b5-84f1-17d743063d14/disk-0.vmdk from lease info. {{(pid=61978) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1009.434166] env[61978]: DEBUG oslo_vmware.rw_handles [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f71df3-a943-71b5-84f1-17d743063d14/disk-0.vmdk for reading. 
{{(pid=61978) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1009.531393] env[61978]: DEBUG nova.network.neutron [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1009.591366] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b4431ee8-6486-4a1d-9f5d-15a7b3f56c45 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.715645] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395082, 'name': Rename_Task, 'duration_secs': 0.193317} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.715980] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1009.716283] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-251b9283-ba4c-4cda-b2d9-bf8650e92806 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.724521] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1009.724521] env[61978]: value = "task-1395085" [ 1009.724521] env[61978]: _type = "Task" [ 1009.724521] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.734228] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395085, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.817108] env[61978]: DEBUG oslo_vmware.api [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395083, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.819380] env[61978]: DEBUG nova.network.neutron [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Updating instance_info_cache with network_info: [{"id": "6324dacc-b741-4de5-8ded-34326888d25f", "address": "fa:16:3e:36:fb:4d", "network": {"id": "c16671c6-39f3-456e-b159-3af6a9553564", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1089542181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "add5612301884f668bbe80681629e8d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6324dacc-b7", "ovs_interfaceid": "6324dacc-b741-4de5-8ded-34326888d25f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.835009] env[61978]: DEBUG oslo_vmware.api [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395084, 'name': PowerOnVM_Task, 'duration_secs': 0.487819} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.835555] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1009.835995] env[61978]: DEBUG nova.compute.manager [None req-8aff68fe-cb85-48bd-bb32-96e06f64efc7 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1009.837819] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce674916-7379-4558-9405-ad51c65a904d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.912950] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': task-1395081, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.743363} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.913314] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba/aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1009.913554] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1009.913863] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2bfc477e-a50b-400d-a9b0-d558f5dafbe8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.924553] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Waiting for the task: (returnval){ [ 1009.924553] env[61978]: value = "task-1395086" [ 1009.924553] env[61978]: _type = "Task" [ 1009.924553] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.934965] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': task-1395086, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.043987] env[61978]: DEBUG nova.compute.manager [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1010.073826] env[61978]: DEBUG nova.virt.hardware [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1010.074208] env[61978]: DEBUG nova.virt.hardware [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1010.074474] env[61978]: DEBUG nova.virt.hardware [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1010.074774] env[61978]: DEBUG nova.virt.hardware [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1010.074933] env[61978]: DEBUG nova.virt.hardware [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1010.075267] env[61978]: DEBUG nova.virt.hardware [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1010.075500] env[61978]: DEBUG nova.virt.hardware [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1010.075668] env[61978]: DEBUG nova.virt.hardware [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1010.075965] env[61978]: DEBUG nova.virt.hardware [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 
tempest-ImagesTestJSON-1872689461-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1010.076077] env[61978]: DEBUG nova.virt.hardware [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1010.076204] env[61978]: DEBUG nova.virt.hardware [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1010.077454] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0508554-7768-4a2b-9cf9-6328991ba530 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.087718] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53183f33-ad73-45c9-93be-8911b5d9d07b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.188332] env[61978]: DEBUG nova.network.neutron [-] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.240911] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395085, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.317877] env[61978]: DEBUG oslo_vmware.api [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395083, 'name': CreateSnapshot_Task, 'duration_secs': 0.869836} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.318563] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Created Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1010.319856] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39195400-210d-41db-ba57-3e3297c16e23 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.329548] env[61978]: DEBUG oslo_concurrency.lockutils [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Releasing lock "refresh_cache-4c7053ee-7c44-49ee-8d30-bf14686c6b1c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.329949] env[61978]: DEBUG nova.compute.manager [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Instance network_info: |[{"id": "6324dacc-b741-4de5-8ded-34326888d25f", "address": "fa:16:3e:36:fb:4d", "network": {"id": "c16671c6-39f3-456e-b159-3af6a9553564", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1089542181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "add5612301884f668bbe80681629e8d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6324dacc-b7", "ovs_interfaceid": "6324dacc-b741-4de5-8ded-34326888d25f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1010.333043] env[61978]: DEBUG oslo_concurrency.lockutils [req-8c56f8a6-cf5e-46b6-b6f3-2ba2bc895831 req-9391e343-7f23-4818-96b3-119ea05ca5f7 service nova] Acquired lock "refresh_cache-4c7053ee-7c44-49ee-8d30-bf14686c6b1c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.333402] env[61978]: DEBUG nova.network.neutron [req-8c56f8a6-cf5e-46b6-b6f3-2ba2bc895831 req-9391e343-7f23-4818-96b3-119ea05ca5f7 service nova] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Refreshing network info cache for port 6324dacc-b741-4de5-8ded-34326888d25f {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1010.334724] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 
tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:fb:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dced2f3d-7fd3-4a42-836d-9f02dab4c949', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6324dacc-b741-4de5-8ded-34326888d25f', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1010.343964] env[61978]: DEBUG oslo.service.loopingcall [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1010.345667] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1010.345934] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53593a3c-722e-4e9f-b2c2-1168e103b1c5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.375515] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1010.375515] env[61978]: value = "task-1395087" [ 1010.375515] env[61978]: _type = "Task" [ 1010.375515] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.397163] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395087, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.438630] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': task-1395086, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075932} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.439124] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1010.440356] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efaf4d6c-b2dd-4c65-98c8-adaacf7068a8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.473286] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba/aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1010.478718] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5195d5db-44b0-46c3-821c-a6187b8acc08 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.504354] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Waiting for the task: (returnval){ [ 1010.504354] env[61978]: value = "task-1395088" [ 1010.504354] env[61978]: _type = "Task" [ 1010.504354] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.520718] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': task-1395088, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.691670] env[61978]: INFO nova.compute.manager [-] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Took 1.61 seconds to deallocate network for instance. [ 1010.717838] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425aa304-bfef-4c27-aa47-6a86a2cce7ee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.731651] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd012cbf-ecaf-41e1-a82c-3bc45c57ddf4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.744267] env[61978]: DEBUG oslo_vmware.api [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395085, 'name': PowerOnVM_Task, 'duration_secs': 0.872313} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.773644] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1010.774089] env[61978]: INFO nova.compute.manager [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Took 9.28 seconds to spawn the instance on the hypervisor. [ 1010.774398] env[61978]: DEBUG nova.compute.manager [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1010.776337] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4baca86b-4e66-4134-804e-0451282d86ea {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.779821] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef416ceb-0d6c-44d8-89b1-a64ee188c92a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.795100] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8173202f-adbd-4d3c-b76b-a3f040613ea8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.812333] env[61978]: DEBUG nova.compute.provider_tree [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1010.851796] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Creating linked-clone VM from snapshot {{(pid=61978) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1010.854340] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-96e696db-fbda-4bbc-976e-4f9def296220 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.866856] env[61978]: DEBUG oslo_vmware.api [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1010.866856] env[61978]: value = "task-1395089" [ 1010.866856] env[61978]: _type = "Task" [ 1010.866856] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.877606] env[61978]: DEBUG oslo_vmware.api [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395089, 'name': CloneVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.888013] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395087, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.979795] env[61978]: DEBUG nova.network.neutron [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Successfully updated port: 46a7e991-f936-4f08-bc8b-1ea0bb74eeb9 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1011.018169] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': task-1395088, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.203731] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.314571] env[61978]: INFO nova.compute.manager [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Took 41.94 seconds to build instance. [ 1011.319487] env[61978]: DEBUG nova.scheduler.client.report [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1011.380831] env[61978]: DEBUG oslo_vmware.api [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395089, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.392306] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395087, 'name': CreateVM_Task, 'duration_secs': 0.700745} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.392697] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1011.393654] env[61978]: DEBUG oslo_concurrency.lockutils [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.394373] env[61978]: DEBUG oslo_concurrency.lockutils [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.397993] env[61978]: DEBUG oslo_concurrency.lockutils [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1011.397993] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd58f2bf-d843-4cb3-ae23-84c4204f990b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.404222] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 1011.404222] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5262f59c-55bf-0749-e55e-56bb8468e917" [ 1011.404222] env[61978]: _type = "Task" [ 1011.404222] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.421229] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5262f59c-55bf-0749-e55e-56bb8468e917, 'name': SearchDatastore_Task, 'duration_secs': 0.01481} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.423637] env[61978]: DEBUG oslo_concurrency.lockutils [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.423936] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1011.424240] env[61978]: DEBUG oslo_concurrency.lockutils [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.424397] env[61978]: DEBUG oslo_concurrency.lockutils [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.424585] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1011.425048] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-abbe38f9-e9a3-4344-a89d-1789ae4a8639 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.439739] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1011.443472] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1011.444373] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2dd9f80-4ea4-412a-8092-ca53495cd554 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.452252] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 1011.452252] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52910088-21a1-a1bf-1dea-95dd27c6b000" [ 1011.452252] env[61978]: _type = "Task" [ 1011.452252] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.464136] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52910088-21a1-a1bf-1dea-95dd27c6b000, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.468700] env[61978]: DEBUG nova.network.neutron [req-8c56f8a6-cf5e-46b6-b6f3-2ba2bc895831 req-9391e343-7f23-4818-96b3-119ea05ca5f7 service nova] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Updated VIF entry in instance network info cache for port 6324dacc-b741-4de5-8ded-34326888d25f. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1011.468860] env[61978]: DEBUG nova.network.neutron [req-8c56f8a6-cf5e-46b6-b6f3-2ba2bc895831 req-9391e343-7f23-4818-96b3-119ea05ca5f7 service nova] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Updating instance_info_cache with network_info: [{"id": "6324dacc-b741-4de5-8ded-34326888d25f", "address": "fa:16:3e:36:fb:4d", "network": {"id": "c16671c6-39f3-456e-b159-3af6a9553564", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1089542181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "add5612301884f668bbe80681629e8d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6324dacc-b7", "ovs_interfaceid": "6324dacc-b741-4de5-8ded-34326888d25f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.482229] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock 
"refresh_cache-0d48ae5d-7cc8-42b3-a993-44636e9cb171" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.482377] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquired lock "refresh_cache-0d48ae5d-7cc8-42b3-a993-44636e9cb171" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.484770] env[61978]: DEBUG nova.network.neutron [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1011.515662] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': task-1395088, 'name': ReconfigVM_Task, 'duration_secs': 0.616206} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.516086] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Reconfigured VM instance instance-00000031 to attach disk [datastore1] aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba/aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1011.516807] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b4ec2ce8-1637-45ab-866d-084bb00290a0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.523338] env[61978]: DEBUG nova.compute.manager [req-c5a4d7c3-7370-4f84-bc4c-4885e8d99348 req-8801627a-8a41-4895-aa65-e1d27e4000bf service nova] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Received event network-vif-deleted-cc183679-2e0d-4d97-9429-82606794bea3 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1011.523655] env[61978]: DEBUG nova.compute.manager [req-c5a4d7c3-7370-4f84-bc4c-4885e8d99348 req-8801627a-8a41-4895-aa65-e1d27e4000bf service nova] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Received event network-vif-plugged-46a7e991-f936-4f08-bc8b-1ea0bb74eeb9 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1011.523939] env[61978]: DEBUG oslo_concurrency.lockutils [req-c5a4d7c3-7370-4f84-bc4c-4885e8d99348 req-8801627a-8a41-4895-aa65-e1d27e4000bf service nova] Acquiring lock "0d48ae5d-7cc8-42b3-a993-44636e9cb171-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.524241] env[61978]: DEBUG oslo_concurrency.lockutils [req-c5a4d7c3-7370-4f84-bc4c-4885e8d99348 req-8801627a-8a41-4895-aa65-e1d27e4000bf service nova] Lock "0d48ae5d-7cc8-42b3-a993-44636e9cb171-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.525057] env[61978]: DEBUG oslo_concurrency.lockutils [req-c5a4d7c3-7370-4f84-bc4c-4885e8d99348 req-8801627a-8a41-4895-aa65-e1d27e4000bf service nova] Lock "0d48ae5d-7cc8-42b3-a993-44636e9cb171-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.525415] env[61978]: DEBUG nova.compute.manager [req-c5a4d7c3-7370-4f84-bc4c-4885e8d99348 req-8801627a-8a41-4895-aa65-e1d27e4000bf service nova] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] No waiting events found dispatching network-vif-plugged-46a7e991-f936-4f08-bc8b-1ea0bb74eeb9 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1011.525664] env[61978]: WARNING nova.compute.manager [req-c5a4d7c3-7370-4f84-bc4c-4885e8d99348 req-8801627a-8a41-4895-aa65-e1d27e4000bf service nova] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Received unexpected event network-vif-plugged-46a7e991-f936-4f08-bc8b-1ea0bb74eeb9 for instance with vm_state building and task_state spawning. [ 1011.525910] env[61978]: DEBUG nova.compute.manager [req-c5a4d7c3-7370-4f84-bc4c-4885e8d99348 req-8801627a-8a41-4895-aa65-e1d27e4000bf service nova] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Received event network-changed-46a7e991-f936-4f08-bc8b-1ea0bb74eeb9 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1011.526104] env[61978]: DEBUG nova.compute.manager [req-c5a4d7c3-7370-4f84-bc4c-4885e8d99348 req-8801627a-8a41-4895-aa65-e1d27e4000bf service nova] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Refreshing instance network info cache due to event network-changed-46a7e991-f936-4f08-bc8b-1ea0bb74eeb9. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1011.526356] env[61978]: DEBUG oslo_concurrency.lockutils [req-c5a4d7c3-7370-4f84-bc4c-4885e8d99348 req-8801627a-8a41-4895-aa65-e1d27e4000bf service nova] Acquiring lock "refresh_cache-0d48ae5d-7cc8-42b3-a993-44636e9cb171" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.528421] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Waiting for the task: (returnval){ [ 1011.528421] env[61978]: value = "task-1395090" [ 1011.528421] env[61978]: _type = "Task" [ 1011.528421] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.540398] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': task-1395090, 'name': Rename_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.822605] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1da717ba-31e8-4784-80be-ca59a132e382 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "8f609401-af09-4291-a1e7-a356fbc4aac9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.753s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.826342] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.845s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.826342] env[61978]: DEBUG nova.compute.manager [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1011.830029] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.358s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.830029] env[61978]: DEBUG nova.objects.instance [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lazy-loading 'resources' on Instance uuid ea1c2d74-70b4-4547-a887-78e291c3082a {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1011.889847] env[61978]: DEBUG oslo_vmware.api [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395089, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.972159] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52910088-21a1-a1bf-1dea-95dd27c6b000, 'name': SearchDatastore_Task, 'duration_secs': 0.014043} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.974066] env[61978]: DEBUG oslo_concurrency.lockutils [req-8c56f8a6-cf5e-46b6-b6f3-2ba2bc895831 req-9391e343-7f23-4818-96b3-119ea05ca5f7 service nova] Releasing lock "refresh_cache-4c7053ee-7c44-49ee-8d30-bf14686c6b1c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.974484] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6efec21-faa7-4573-a551-9e0d2b260c07 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.983342] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 1011.983342] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5216b6b4-e041-52bb-1de4-8ec692c5575a" [ 1011.983342] env[61978]: _type = "Task" [ 1011.983342] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.000376] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5216b6b4-e041-52bb-1de4-8ec692c5575a, 'name': SearchDatastore_Task, 'duration_secs': 0.013146} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.000376] env[61978]: DEBUG oslo_concurrency.lockutils [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.000748] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 4c7053ee-7c44-49ee-8d30-bf14686c6b1c/4c7053ee-7c44-49ee-8d30-bf14686c6b1c.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1012.000953] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-271ee0ed-35a5-4c8f-af81-4b00dda5e1d8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.010798] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 1012.010798] env[61978]: value = "task-1395091" [ 1012.010798] env[61978]: _type = "Task" [ 1012.010798] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.026060] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1395091, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.044147] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': task-1395090, 'name': Rename_Task, 'duration_secs': 0.215536} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.044481] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1012.044791] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed0ca37b-c785-418e-b80f-e1f99dd901c5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.053811] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Waiting for the task: (returnval){ [ 1012.053811] env[61978]: value = "task-1395092" [ 1012.053811] env[61978]: _type = "Task" [ 1012.053811] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.065576] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': task-1395092, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.066564] env[61978]: DEBUG nova.network.neutron [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1012.331222] env[61978]: DEBUG nova.compute.utils [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1012.336269] env[61978]: DEBUG nova.compute.manager [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1012.339534] env[61978]: DEBUG nova.network.neutron [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1012.384769] env[61978]: DEBUG oslo_vmware.api [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395089, 'name': CloneVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.417972] env[61978]: DEBUG nova.network.neutron [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Updating instance_info_cache with network_info: [{"id": "46a7e991-f936-4f08-bc8b-1ea0bb74eeb9", "address": "fa:16:3e:e6:e4:e5", "network": {"id": "c8b3d2ab-6847-4747-9638-70d0aefd63da", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1816910772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c31ffdd4e70d40ecbbb56777f9422a52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46a7e991-f9", "ovs_interfaceid": "46a7e991-f936-4f08-bc8b-1ea0bb74eeb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.461900] env[61978]: DEBUG nova.policy [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ab697d6ab4e4ece8b290afbf5ec1366', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a33ac41ae0247b59c400c6ed9145239', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1012.528318] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1395091, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.570917] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': task-1395092, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.651343] env[61978]: DEBUG oslo_concurrency.lockutils [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "8f609401-af09-4291-a1e7-a356fbc4aac9" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.651343] env[61978]: DEBUG oslo_concurrency.lockutils [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "8f609401-af09-4291-a1e7-a356fbc4aac9" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.651343] env[61978]: INFO nova.compute.manager [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Shelving [ 1012.842888] env[61978]: DEBUG nova.compute.manager [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1012.884323] env[61978]: DEBUG oslo_vmware.api [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395089, 'name': CloneVM_Task, 'duration_secs': 1.573437} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.887729] env[61978]: INFO nova.virt.vmwareapi.vmops [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Created linked-clone VM from snapshot [ 1012.889651] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bfd588c-cc46-466c-8c6e-2e3b7ea57591 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.906038] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Uploading image f2517d29-6937-411e-90af-450d799a9c72 {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1012.921271] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Releasing lock "refresh_cache-0d48ae5d-7cc8-42b3-a993-44636e9cb171" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.922470] env[61978]: DEBUG nova.compute.manager [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Instance network_info: |[{"id": "46a7e991-f936-4f08-bc8b-1ea0bb74eeb9", "address": "fa:16:3e:e6:e4:e5", "network": {"id": "c8b3d2ab-6847-4747-9638-70d0aefd63da", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1816910772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c31ffdd4e70d40ecbbb56777f9422a52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46a7e991-f9", "ovs_interfaceid": "46a7e991-f936-4f08-bc8b-1ea0bb74eeb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1012.922470] env[61978]: DEBUG oslo_concurrency.lockutils [req-c5a4d7c3-7370-4f84-bc4c-4885e8d99348 req-8801627a-8a41-4895-aa65-e1d27e4000bf service nova] Acquired lock "refresh_cache-0d48ae5d-7cc8-42b3-a993-44636e9cb171" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.922754] env[61978]: DEBUG nova.network.neutron [req-c5a4d7c3-7370-4f84-bc4c-4885e8d99348 req-8801627a-8a41-4895-aa65-e1d27e4000bf service nova] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Refreshing network info cache for port 
46a7e991-f936-4f08-bc8b-1ea0bb74eeb9 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1012.923273] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:e4:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '411f389f-4e4f-4450-891e-38944cac6135', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '46a7e991-f936-4f08-bc8b-1ea0bb74eeb9', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1012.931787] env[61978]: DEBUG oslo.service.loopingcall [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1012.932948] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1012.933473] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82db5e76-8bed-4d87-8ecb-f8388afd7693 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.966924] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1012.966924] env[61978]: value = "task-1395093" [ 1012.966924] env[61978]: _type = "Task" [ 1012.966924] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.975229] env[61978]: DEBUG oslo_vmware.rw_handles [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1012.975229] env[61978]: value = "vm-295908" [ 1012.975229] env[61978]: _type = "VirtualMachine" [ 1012.975229] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1012.975229] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d26444e3-57fd-4e14-b7c8-db799ba94368 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.988791] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395093, 'name': CreateVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.991812] env[61978]: DEBUG oslo_vmware.rw_handles [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lease: (returnval){ [ 1012.991812] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52bff802-66fe-230b-087d-1590ec60f082" [ 1012.991812] env[61978]: _type = "HttpNfcLease" [ 1012.991812] env[61978]: } obtained for exporting VM: (result){ [ 1012.991812] env[61978]: value = "vm-295908" [ 1012.991812] env[61978]: _type = "VirtualMachine" [ 1012.991812] env[61978]: }. 
{{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1012.992613] env[61978]: DEBUG oslo_vmware.api [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the lease: (returnval){ [ 1012.992613] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52bff802-66fe-230b-087d-1590ec60f082" [ 1012.992613] env[61978]: _type = "HttpNfcLease" [ 1012.992613] env[61978]: } to be ready. {{(pid=61978) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1012.997130] env[61978]: DEBUG nova.network.neutron [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Successfully created port: 7f676fff-3b85-4a9d-b8f4-68c0ea6eda7f {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1013.009903] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1013.009903] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52bff802-66fe-230b-087d-1590ec60f082" [ 1013.009903] env[61978]: _type = "HttpNfcLease" [ 1013.009903] env[61978]: } is ready. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1013.011027] env[61978]: DEBUG oslo_vmware.rw_handles [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1013.011027] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52bff802-66fe-230b-087d-1590ec60f082" [ 1013.011027] env[61978]: _type = "HttpNfcLease" [ 1013.011027] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1013.011409] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d332708-7ce5-4f30-bfa1-fce0901ddb91 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.027690] env[61978]: DEBUG oslo_vmware.rw_handles [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521bec9d-fd5a-3cf0-0877-9018c24e20c6/disk-0.vmdk from lease info. {{(pid=61978) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1013.028076] env[61978]: DEBUG oslo_vmware.rw_handles [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521bec9d-fd5a-3cf0-0877-9018c24e20c6/disk-0.vmdk for reading. {{(pid=61978) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1013.033141] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1395091, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.6951} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.034437] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 4c7053ee-7c44-49ee-8d30-bf14686c6b1c/4c7053ee-7c44-49ee-8d30-bf14686c6b1c.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1013.034664] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1013.111322] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d854fc3a-ded7-4037-a02e-8c9d597a6806 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.139967] env[61978]: DEBUG oslo_vmware.api [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': task-1395092, 'name': PowerOnVM_Task, 'duration_secs': 0.819993} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.143919] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1013.143919] env[61978]: INFO nova.compute.manager [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Took 8.73 seconds to spawn the instance on the hypervisor. [ 1013.143919] env[61978]: DEBUG nova.compute.manager [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1013.143919] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 1013.143919] env[61978]: value = "task-1395095" [ 1013.143919] env[61978]: _type = "Task" [ 1013.143919] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.144814] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b347543-0c29-4107-bddd-727093ed8eaf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.158031] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99038bc1-1de3-4d18-886b-ca9c0436c030 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.184940] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1395095, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.186081] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1013.191107] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c64a0f8-31bc-4ff7-807f-2e05051f4873 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.196395] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e03735ed-9d6e-40d9-825c-26ba45f65d14 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.199809] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-cde88469-41c8-4cb2-80f8-8f82abe6adc3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.243521] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37880e1f-4bbe-4a3d-8b80-f75bd8a3ad14 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.246979] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1013.246979] env[61978]: value = "task-1395096" [ 1013.246979] env[61978]: _type = "Task" [ 1013.246979] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.257938] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c44830-9e0f-4da8-9eda-51e09e127a96 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.266172] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395096, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.288038] env[61978]: DEBUG nova.compute.provider_tree [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1013.481220] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395093, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.671039] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1395095, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102367} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.671039] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1013.671039] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de26f35-e659-4ada-8d02-bf9d3c5366ab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.708538] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 4c7053ee-7c44-49ee-8d30-bf14686c6b1c/4c7053ee-7c44-49ee-8d30-bf14686c6b1c.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1013.715424] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46091234-0432-4edf-8eae-5c6ffd0dff22 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.733461] env[61978]: INFO nova.compute.manager [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Took 41.53 seconds to build instance. [ 1013.749822] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 1013.749822] env[61978]: value = "task-1395097" [ 1013.749822] env[61978]: _type = "Task" [ 1013.749822] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.777290] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1395097, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.783822] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395096, 'name': PowerOffVM_Task, 'duration_secs': 0.368296} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.784531] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1013.786126] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b840fe86-de93-44aa-b11b-a6d4edf27b91 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.791956] env[61978]: DEBUG nova.scheduler.client.report [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1013.820032] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88ea8b3-811d-4bc4-b3e1-2971ed4171bc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.851929] env[61978]: DEBUG nova.compute.manager [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1013.919761] env[61978]: DEBUG nova.network.neutron [req-c5a4d7c3-7370-4f84-bc4c-4885e8d99348 req-8801627a-8a41-4895-aa65-e1d27e4000bf service nova] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Updated VIF entry in instance network info cache for port 46a7e991-f936-4f08-bc8b-1ea0bb74eeb9. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1013.922468] env[61978]: DEBUG nova.network.neutron [req-c5a4d7c3-7370-4f84-bc4c-4885e8d99348 req-8801627a-8a41-4895-aa65-e1d27e4000bf service nova] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Updating instance_info_cache with network_info: [{"id": "46a7e991-f936-4f08-bc8b-1ea0bb74eeb9", "address": "fa:16:3e:e6:e4:e5", "network": {"id": "c8b3d2ab-6847-4747-9638-70d0aefd63da", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1816910772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c31ffdd4e70d40ecbbb56777f9422a52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46a7e991-f9", "ovs_interfaceid": "46a7e991-f936-4f08-bc8b-1ea0bb74eeb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.983798] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395093, 'name': CreateVM_Task, 'duration_secs': 0.526718} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.985304] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1013.985304] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1013.987179] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.987764] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1013.988371] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-541aad95-0db4-4251-9265-e4cc6505d3b6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.997980] 
env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1013.997980] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52862503-bf27-177d-0df2-717b3e453275" [ 1013.997980] env[61978]: _type = "Task" [ 1013.997980] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.017698] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52862503-bf27-177d-0df2-717b3e453275, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.236418] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eea2f66f-090c-4cd2-a00d-a46916b9d841 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Lock "aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.127s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.267579] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1395097, 'name': ReconfigVM_Task, 'duration_secs': 0.495125} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.267579] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 4c7053ee-7c44-49ee-8d30-bf14686c6b1c/4c7053ee-7c44-49ee-8d30-bf14686c6b1c.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1014.267579] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=61978) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 1014.267816] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-7329ced6-35f5-4210-857e-26ac1f5c02c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.278664] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 1014.278664] env[61978]: value = "task-1395098" [ 1014.278664] env[61978]: _type = "Task" [ 1014.278664] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.291823] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1395098, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.297894] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.470s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.301104] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.659s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.302485] env[61978]: INFO nova.compute.claims [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1014.334132] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Creating Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1014.335490] env[61978]: INFO nova.scheduler.client.report [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Deleted allocations for instance ea1c2d74-70b4-4547-a887-78e291c3082a [ 1014.336858] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a1d1d4a9-174b-4dba-9155-b500e7c7292c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.350649] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1014.350649] env[61978]: value = "task-1395099" [ 1014.350649] env[61978]: _type = "Task" [ 1014.350649] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.366531] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395099, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.428892] env[61978]: DEBUG oslo_concurrency.lockutils [req-c5a4d7c3-7370-4f84-bc4c-4885e8d99348 req-8801627a-8a41-4895-aa65-e1d27e4000bf service nova] Releasing lock "refresh_cache-0d48ae5d-7cc8-42b3-a993-44636e9cb171" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.511899] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52862503-bf27-177d-0df2-717b3e453275, 'name': SearchDatastore_Task, 'duration_secs': 0.013316} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.513172] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.513499] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1014.513763] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1014.513914] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.515027] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1014.515143] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d87c0beb-cde9-4e60-8d51-2444720d44ce {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.528238] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1014.528423] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 
tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1014.529422] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9aabe3af-7426-4680-a135-5a11ada22cf2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.537636] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1014.537636] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52534167-add2-65c7-d9a2-2cfb56d5e45d" [ 1014.537636] env[61978]: _type = "Task" [ 1014.537636] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.548590] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52534167-add2-65c7-d9a2-2cfb56d5e45d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.725166] env[61978]: DEBUG nova.network.neutron [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Successfully updated port: 7f676fff-3b85-4a9d-b8f4-68c0ea6eda7f {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1014.790058] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1395098, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.080681} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.790345] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=61978) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 1014.791265] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fdc5597-9b49-45ee-aba7-14a7569d3876 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.821817] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 4c7053ee-7c44-49ee-8d30-bf14686c6b1c/ephemeral_0.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1014.822543] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2288337b-e64e-4000-8811-0877cae29c1d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.847600] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 1014.847600] env[61978]: value = "task-1395100" [ 1014.847600] env[61978]: _type = "Task" [ 1014.847600] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.848210] env[61978]: DEBUG oslo_concurrency.lockutils [None req-db1270db-7d8c-414c-9f35-779d2db5e86b tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "ea1c2d74-70b4-4547-a887-78e291c3082a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.806s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.863151] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395099, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.866448] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1395100, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.049349] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52534167-add2-65c7-d9a2-2cfb56d5e45d, 'name': SearchDatastore_Task, 'duration_secs': 0.015148} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.050352] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf20169e-9131-45f2-b641-920f65618fb6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.056821] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1015.056821] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]526ea960-c5d4-b41a-38b6-8020152e2cb5" [ 1015.056821] env[61978]: _type = "Task" [ 1015.056821] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.066862] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]526ea960-c5d4-b41a-38b6-8020152e2cb5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.227661] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "refresh_cache-7e71c8de-1f94-4161-8ad8-a67792c5ce24" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1015.227913] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired lock "refresh_cache-7e71c8de-1f94-4161-8ad8-a67792c5ce24" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.228257] env[61978]: DEBUG nova.network.neutron [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1015.365108] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1395100, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.374777] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395099, 'name': CreateSnapshot_Task, 'duration_secs': 0.935879} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.375187] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Created Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1015.375985] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde83458-46d1-4938-aa55-1fef8087dbf5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.568991] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]526ea960-c5d4-b41a-38b6-8020152e2cb5, 'name': SearchDatastore_Task, 'duration_secs': 0.019835} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.569339] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.569617] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 0d48ae5d-7cc8-42b3-a993-44636e9cb171/0d48ae5d-7cc8-42b3-a993-44636e9cb171.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1015.569846] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-75cc9d11-2cce-4840-b65b-02e894991b8d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.579357] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1015.579357] env[61978]: value = "task-1395101" [ 1015.579357] env[61978]: _type = "Task" [ 1015.579357] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.591399] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395101, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.773735] env[61978]: DEBUG nova.network.neutron [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1015.806797] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ad00f3-2c2a-435d-9c06-2e4db79d2dad {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.817921] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e5b51f-cd18-4070-beef-218965564f0e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.858246] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60093694-1a47-48d8-8f81-01e833eeed63 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.871247] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d4a5b92-2357-4e5e-a8f1-4a6e86f0d849 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.875780] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1395100, 'name': ReconfigVM_Task, 'duration_secs': 0.541834} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.876198] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 4c7053ee-7c44-49ee-8d30-bf14686c6b1c/ephemeral_0.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1015.877903] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-11a45e4b-e5e8-4429-8878-2b2cb81f0489 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.898649] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Creating linked-clone VM from snapshot {{(pid=61978) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1015.899338] env[61978]: DEBUG nova.compute.provider_tree [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1015.905317] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5cab6cf4-0386-419d-8352-34c542bbe49d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.910309] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 1015.910309] env[61978]: value = "task-1395102" [ 1015.910309] env[61978]: _type = "Task" [ 1015.910309] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.912324] env[61978]: DEBUG nova.scheduler.client.report [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1015.924588] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1015.924588] env[61978]: value = "task-1395103" [ 1015.924588] env[61978]: _type = "Task" [ 1015.924588] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.933076] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1395102, 'name': Rename_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.940252] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395103, 'name': CloneVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.004770] env[61978]: DEBUG nova.network.neutron [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Updating instance_info_cache with network_info: [{"id": "7f676fff-3b85-4a9d-b8f4-68c0ea6eda7f", "address": "fa:16:3e:ca:10:89", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f676fff-3b", "ovs_interfaceid": "7f676fff-3b85-4a9d-b8f4-68c0ea6eda7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.091973] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395101, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.417910] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.117s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.418512] env[61978]: DEBUG nova.compute.manager [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1016.425692] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.018s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.427543] env[61978]: INFO nova.compute.claims [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1016.441037] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1395102, 'name': Rename_Task, 'duration_secs': 0.242843} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.443655] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1016.443993] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395103, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.444205] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb1ac570-8a10-4d5e-b38a-2f7413917a23 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.453178] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 1016.453178] env[61978]: value = "task-1395104" [ 1016.453178] env[61978]: _type = "Task" [ 1016.453178] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.464237] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1395104, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.508437] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Releasing lock "refresh_cache-7e71c8de-1f94-4161-8ad8-a67792c5ce24" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1016.508928] env[61978]: DEBUG nova.compute.manager [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Instance network_info: |[{"id": "7f676fff-3b85-4a9d-b8f4-68c0ea6eda7f", "address": "fa:16:3e:ca:10:89", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f676fff-3b", "ovs_interfaceid": "7f676fff-3b85-4a9d-b8f4-68c0ea6eda7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1016.591530] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395101, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.928047] env[61978]: DEBUG nova.compute.utils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1016.930738] env[61978]: DEBUG nova.compute.manager [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1016.930995] env[61978]: DEBUG nova.network.neutron [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1016.953386] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395103, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.965326] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1395104, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.982460] env[61978]: DEBUG nova.policy [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '934ae9fb7c73480292add0c86672649e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b106bb3dbeb4bc9a4fc832c860a559d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1017.092222] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395101, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.325136] env[61978]: DEBUG nova.network.neutron [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Successfully created port: 34e47e3b-49fc-4498-b258-cf27c276e3ac {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1017.431748] env[61978]: DEBUG nova.compute.manager [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1017.448353] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395103, 'name': CloneVM_Task} progress is 95%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.465366] env[61978]: DEBUG oslo_vmware.api [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1395104, 'name': PowerOnVM_Task, 'duration_secs': 0.621497} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.465708] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1017.465961] env[61978]: INFO nova.compute.manager [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Took 9.81 seconds to spawn the instance on the hypervisor. [ 1017.466214] env[61978]: DEBUG nova.compute.manager [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1017.467172] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65946d29-c9b8-4535-97c1-28a1a7a9b5ea {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.594838] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395101, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.59205} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.595440] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 0d48ae5d-7cc8-42b3-a993-44636e9cb171/0d48ae5d-7cc8-42b3-a993-44636e9cb171.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1017.595512] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1017.596777] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fb0c2863-cda1-4e78-b1df-39d617c6e520 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.605970] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1017.605970] env[61978]: value = "task-1395105" [ 1017.605970] env[61978]: _type = "Task" [ 1017.605970] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.616060] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395105, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.897514] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0dba1cd-b53e-4c79-9def-315595b96d4e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.906966] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b202ffd8-3954-4f35-be3b-386aebce37d6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.947904] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c43406-215b-49df-bd85-6a9bfd6685ef {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.961312] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92775290-5794-4833-930d-aeb31ff26bc7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.965482] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395103, 'name': CloneVM_Task, 'duration_secs': 1.728587} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.965788] env[61978]: INFO nova.virt.vmwareapi.vmops [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Created linked-clone VM from snapshot [ 1017.966943] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4238fe4-6c90-44ee-b129-9aebc1fb0f02 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.979214] env[61978]: DEBUG nova.compute.provider_tree [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1017.987097] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Uploading image b1c651f7-625a-4460-839a-a49c838332bb {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1017.997181] env[61978]: INFO nova.compute.manager [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Took 39.91 seconds to build instance. [ 1018.117669] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395105, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.126435} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.118122] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1018.118932] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5ce5023-08c2-4b2e-85ca-e1cdb547f77c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.142589] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 0d48ae5d-7cc8-42b3-a993-44636e9cb171/0d48ae5d-7cc8-42b3-a993-44636e9cb171.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1018.143373] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36a13c49-cfd2-4f0b-bc8a-9fac2b25237a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.164975] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1018.164975] env[61978]: value = "task-1395106" [ 1018.164975] env[61978]: _type = "Task" [ 1018.164975] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.173991] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395106, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.291084] env[61978]: DEBUG nova.virt.hardware [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1018.291450] env[61978]: DEBUG nova.virt.hardware [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1018.291673] env[61978]: DEBUG nova.virt.hardware [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1018.293216] env[61978]: DEBUG nova.virt.hardware [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1018.293216] env[61978]: DEBUG nova.virt.hardware [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1018.293216] env[61978]: DEBUG nova.virt.hardware [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1018.293216] env[61978]: DEBUG nova.virt.hardware [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1018.293216] env[61978]: DEBUG nova.virt.hardware [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1018.293474] env[61978]: DEBUG 
nova.virt.hardware [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1018.293474] env[61978]: DEBUG nova.virt.hardware [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1018.293474] env[61978]: DEBUG nova.virt.hardware [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1018.296643] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8367c53d-73d7-47c8-8189-f51499a1273e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.306494] env[61978]: DEBUG oslo_vmware.rw_handles [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1018.306494] env[61978]: value = "vm-295911" [ 1018.306494] env[61978]: _type = "VirtualMachine" [ 1018.306494] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1018.308138] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5bcdf3-a587-4e30-a250-0af138798e52 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.313669] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-8c9837f7-d432-4725-98a4-afa892cf3cad {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.328120] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:10:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f676fff-3b85-4a9d-b8f4-68c0ea6eda7f', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1018.337032] env[61978]: DEBUG oslo.service.loopingcall [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1018.342216] env[61978]: DEBUG oslo_vmware.rw_handles [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f71df3-a943-71b5-84f1-17d743063d14/disk-0.vmdk. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1018.342216] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1018.342216] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc23cc0e-754c-4f3b-8dfd-7de0ff515d08 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.344490] env[61978]: DEBUG oslo_vmware.rw_handles [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lease: (returnval){ [ 1018.344490] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]526664b4-e149-6239-fc70-7729a1985b36" [ 1018.344490] env[61978]: _type = "HttpNfcLease" [ 1018.344490] env[61978]: } obtained for exporting VM: (result){ [ 1018.344490] env[61978]: value = "vm-295911" [ 1018.344490] env[61978]: _type = "VirtualMachine" [ 1018.344490] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1018.344788] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the lease: (returnval){ [ 1018.344788] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]526664b4-e149-6239-fc70-7729a1985b36" [ 1018.344788] env[61978]: _type = "HttpNfcLease" [ 1018.344788] env[61978]: } to be ready. {{(pid=61978) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1018.345032] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2a8d6a9c-cac1-466d-83f6-dc175d1f545c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.365170] env[61978]: DEBUG oslo_vmware.rw_handles [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f71df3-a943-71b5-84f1-17d743063d14/disk-0.vmdk is in state: ready. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1018.365346] env[61978]: ERROR oslo_vmware.rw_handles [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f71df3-a943-71b5-84f1-17d743063d14/disk-0.vmdk due to incomplete transfer. 
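The entries above and below follow the oslo.vmware control flow that recurs throughout this log: a vSphere SOAP method such as CloneVM_Task, CopyVirtualDisk_Task, or PowerOnVM_Task is invoked through the API session, the returned Task is polled (the "progress is N%" lines) until it completes, and image exports go through an HttpNfcLease that is later released, or aborted when the transfer is incomplete as in the ERROR entry just above. What follows is a minimal, hypothetical sketch of that invoke-then-poll pattern using oslo.vmware's public session API; the vCenter host, credentials, and retry/poll settings are placeholders and are not taken from this log, and this is not the Nova driver code that produced these entries.

# Hypothetical sketch of the invoke-then-poll pattern visible in this log.
# The vCenter host, credentials, and tuning values below are placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util


def power_on_vm(session, vm_ref):
    """Invoke PowerOnVM_Task and block until the task finishes."""
    # invoke_api() issues the SOAP call; this is what the log records as
    # "Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-...".
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task state every task_poll_interval seconds,
    # producing the "Task: {...} progress is N%" entries, and raises if the
    # task ends in an error state.
    session.wait_for_task(task)


if __name__ == '__main__':
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org',          # placeholder vCenter host
        'administrator@vsphere.local',  # placeholder user
        'secret',                       # placeholder password
        10,                             # api_retry_count
        1.0)                            # task_poll_interval, in seconds
    # Build a ManagedObjectReference from a known moref value; 'vm-295911'
    # appears in this log, but any valid VirtualMachine moref would do.
    vm_ref = vim_util.get_moref('vm-295911', 'VirtualMachine')
    power_on_vm(session, vm_ref)

The log continues below with the same session performing lease housekeeping and further property collection.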
[ 1018.365975] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c80b8e52-68ce-4eee-8895-8277bf0750fa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.370198] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1018.370198] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]526664b4-e149-6239-fc70-7729a1985b36" [ 1018.370198] env[61978]: _type = "HttpNfcLease" [ 1018.370198] env[61978]: } is ready. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1018.371494] env[61978]: DEBUG oslo_vmware.rw_handles [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1018.371494] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]526664b4-e149-6239-fc70-7729a1985b36" [ 1018.371494] env[61978]: _type = "HttpNfcLease" [ 1018.371494] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1018.372335] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1018.372335] env[61978]: value = "task-1395108" [ 1018.372335] env[61978]: _type = "Task" [ 1018.372335] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.373136] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbdb8d0b-2534-4e7a-b780-cd0bd787b7f6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.379443] env[61978]: DEBUG oslo_vmware.rw_handles [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f71df3-a943-71b5-84f1-17d743063d14/disk-0.vmdk. 
{{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1018.379646] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Uploaded image da902b14-4829-45fa-a2d7-251587b0567d to the Glance image server {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1018.381920] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Destroying the VM {{(pid=61978) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1018.382911] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a7c6cdf6-3993-4664-a84f-4bf94a9e4901 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.389584] env[61978]: DEBUG oslo_vmware.rw_handles [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b59d0c-9197-38ab-011d-ab51e257e894/disk-0.vmdk from lease info. {{(pid=61978) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1018.389835] env[61978]: DEBUG oslo_vmware.rw_handles [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b59d0c-9197-38ab-011d-ab51e257e894/disk-0.vmdk for reading. {{(pid=61978) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1018.396992] env[61978]: DEBUG oslo_vmware.api [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 1018.396992] env[61978]: value = "task-1395109" [ 1018.396992] env[61978]: _type = "Task" [ 1018.396992] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.397292] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395108, 'name': CreateVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.464461] env[61978]: DEBUG nova.compute.manager [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1018.478298] env[61978]: DEBUG oslo_vmware.api [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395109, 'name': Destroy_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.490730] env[61978]: DEBUG nova.scheduler.client.report [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1018.497674] env[61978]: DEBUG nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1018.497674] env[61978]: DEBUG nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1018.497674] env[61978]: DEBUG nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1018.498188] env[61978]: DEBUG nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1018.498188] env[61978]: DEBUG nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1018.498188] env[61978]: DEBUG nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1018.498188] 
env[61978]: DEBUG nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1018.498188] env[61978]: DEBUG nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1018.498374] env[61978]: DEBUG nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1018.498558] env[61978]: DEBUG nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1018.498670] env[61978]: DEBUG nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1018.499644] env[61978]: DEBUG oslo_concurrency.lockutils [None req-27bc981b-b2c6-49f9-bd2a-d6540086d634 tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Lock "4c7053ee-7c44-49ee-8d30-bf14686c6b1c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.956s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.502750] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d2c034b-1f2c-443d-8038-8028bf0cb776 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.513660] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cdce792-cd24-4cd7-ae84-710b01339000 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.532307] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8b513d3c-72a9-499b-9f6e-f08523c9a458 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.638120] env[61978]: DEBUG nova.compute.manager [req-734203d9-78b8-4981-8d5f-52ffc297c703 req-53eba5b9-6138-401b-8f50-521fdd5e7df6 service nova] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Received event network-vif-plugged-7f676fff-3b85-4a9d-b8f4-68c0ea6eda7f {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1018.638120] env[61978]: DEBUG oslo_concurrency.lockutils [req-734203d9-78b8-4981-8d5f-52ffc297c703 
req-53eba5b9-6138-401b-8f50-521fdd5e7df6 service nova] Acquiring lock "7e71c8de-1f94-4161-8ad8-a67792c5ce24-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.638767] env[61978]: DEBUG oslo_concurrency.lockutils [req-734203d9-78b8-4981-8d5f-52ffc297c703 req-53eba5b9-6138-401b-8f50-521fdd5e7df6 service nova] Lock "7e71c8de-1f94-4161-8ad8-a67792c5ce24-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.638767] env[61978]: DEBUG oslo_concurrency.lockutils [req-734203d9-78b8-4981-8d5f-52ffc297c703 req-53eba5b9-6138-401b-8f50-521fdd5e7df6 service nova] Lock "7e71c8de-1f94-4161-8ad8-a67792c5ce24-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.638888] env[61978]: DEBUG nova.compute.manager [req-734203d9-78b8-4981-8d5f-52ffc297c703 req-53eba5b9-6138-401b-8f50-521fdd5e7df6 service nova] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] No waiting events found dispatching network-vif-plugged-7f676fff-3b85-4a9d-b8f4-68c0ea6eda7f {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1018.638984] env[61978]: WARNING nova.compute.manager [req-734203d9-78b8-4981-8d5f-52ffc297c703 req-53eba5b9-6138-401b-8f50-521fdd5e7df6 service nova] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Received unexpected event network-vif-plugged-7f676fff-3b85-4a9d-b8f4-68c0ea6eda7f for instance with vm_state building and task_state spawning. [ 1018.680023] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395106, 'name': ReconfigVM_Task, 'duration_secs': 0.468638} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.680023] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 0d48ae5d-7cc8-42b3-a993-44636e9cb171/0d48ae5d-7cc8-42b3-a993-44636e9cb171.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1018.680023] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ef061359-f575-43ce-8c82-a40c848d2fe5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.685999] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1018.685999] env[61978]: value = "task-1395110" [ 1018.685999] env[61978]: _type = "Task" [ 1018.685999] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.697262] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395110, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.887074] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395108, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.901293] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d2afbedd-e4c5-4ba5-866f-ab8579b2b045 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Acquiring lock "interface-aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.901555] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d2afbedd-e4c5-4ba5-866f-ab8579b2b045 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Lock "interface-aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.901882] env[61978]: DEBUG nova.objects.instance [None req-d2afbedd-e4c5-4ba5-866f-ab8579b2b045 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Lazy-loading 'flavor' on Instance uuid aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1018.914172] env[61978]: DEBUG oslo_vmware.api [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395109, 'name': Destroy_Task, 'duration_secs': 0.423926} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.914606] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Destroyed the VM [ 1018.915120] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Deleting Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1018.917978] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c35791a8-dec4-49ae-ab01-61f7112413e9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.924229] env[61978]: DEBUG oslo_vmware.api [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 1018.924229] env[61978]: value = "task-1395111" [ 1018.924229] env[61978]: _type = "Task" [ 1018.924229] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.935154] env[61978]: DEBUG oslo_vmware.api [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395111, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.999374] env[61978]: DEBUG nova.network.neutron [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Successfully updated port: 34e47e3b-49fc-4498-b258-cf27c276e3ac {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1018.999374] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.571s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.999374] env[61978]: DEBUG nova.compute.manager [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1019.004021] env[61978]: DEBUG oslo_concurrency.lockutils [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 23.120s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.056209] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d78c0c60-7a44-4abd-954b-1a67ed8b698d tempest-ServersListShow296Test-1906221242 tempest-ServersListShow296Test-1906221242-project-member] Acquiring lock "c6d1c82b-4d59-4e5c-b4e5-b258cd132e6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.056582] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d78c0c60-7a44-4abd-954b-1a67ed8b698d tempest-ServersListShow296Test-1906221242 tempest-ServersListShow296Test-1906221242-project-member] Lock "c6d1c82b-4d59-4e5c-b4e5-b258cd132e6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.198600] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395110, 'name': Rename_Task, 'duration_secs': 0.185636} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.198918] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1019.199305] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b1825fd4-e455-495c-a8cd-142be47f795a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.209068] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1019.209068] env[61978]: value = "task-1395112" [ 1019.209068] env[61978]: _type = "Task" [ 1019.209068] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.220223] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395112, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.388344] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395108, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.410815] env[61978]: DEBUG nova.objects.instance [None req-d2afbedd-e4c5-4ba5-866f-ab8579b2b045 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Lazy-loading 'pci_requests' on Instance uuid aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1019.436769] env[61978]: DEBUG oslo_vmware.api [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395111, 'name': RemoveSnapshot_Task} progress is 78%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.503491] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "refresh_cache-dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.504960] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquired lock "refresh_cache-dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.505219] env[61978]: DEBUG nova.network.neutron [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1019.508994] env[61978]: INFO nova.compute.claims [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1019.514567] env[61978]: DEBUG nova.compute.utils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1019.518842] env[61978]: DEBUG nova.compute.manager [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1019.518842] env[61978]: DEBUG nova.network.neutron [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1019.559546] env[61978]: DEBUG nova.compute.manager [None req-d78c0c60-7a44-4abd-954b-1a67ed8b698d tempest-ServersListShow296Test-1906221242 tempest-ServersListShow296Test-1906221242-project-member] [instance: c6d1c82b-4d59-4e5c-b4e5-b258cd132e6e] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1019.568410] env[61978]: DEBUG nova.policy [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '934ae9fb7c73480292add0c86672649e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b106bb3dbeb4bc9a4fc832c860a559d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1019.725925] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395112, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.849350] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "9ee04ee8-98ec-4be9-935d-cad7cd176466" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.849563] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "9ee04ee8-98ec-4be9-935d-cad7cd176466" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.890117] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395108, 'name': CreateVM_Task, 'duration_secs': 1.415083} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.890388] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1019.891919] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.892371] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.893705] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1019.894109] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad4225b0-0c2a-4c99-993a-dc2813d55678 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.900995] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1019.900995] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]528b888b-0e11-4352-2492-4a966f6c8cf8" [ 1019.900995] env[61978]: _type = "Task" [ 1019.900995] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.912291] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528b888b-0e11-4352-2492-4a966f6c8cf8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.914049] env[61978]: DEBUG nova.objects.base [None req-d2afbedd-e4c5-4ba5-866f-ab8579b2b045 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1019.914284] env[61978]: DEBUG nova.network.neutron [None req-d2afbedd-e4c5-4ba5-866f-ab8579b2b045 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1019.930557] env[61978]: DEBUG nova.network.neutron [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Successfully created port: a410cd2b-4149-421b-8f8e-287f5927da94 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1019.939679] env[61978]: DEBUG oslo_vmware.api [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395111, 'name': RemoveSnapshot_Task, 'duration_secs': 0.837303} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.939996] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Deleted Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1019.940243] env[61978]: INFO nova.compute.manager [None req-f958d912-a1cd-4f6d-bb28-6887d49514c6 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Took 15.76 seconds to snapshot the instance on the hypervisor. [ 1020.022026] env[61978]: DEBUG nova.compute.manager [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1020.030435] env[61978]: INFO nova.compute.resource_tracker [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Updating resource usage from migration 3552715b-f1cf-4686-a31b-df98ffe8a8b8 [ 1020.082807] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d78c0c60-7a44-4abd-954b-1a67ed8b698d tempest-ServersListShow296Test-1906221242 tempest-ServersListShow296Test-1906221242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.087154] env[61978]: DEBUG nova.network.neutron [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1020.095982] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d2afbedd-e4c5-4ba5-866f-ab8579b2b045 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Lock "interface-aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.193s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.222202] env[61978]: DEBUG oslo_vmware.api [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395112, 'name': PowerOnVM_Task, 'duration_secs': 0.592371} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.222573] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1020.222978] env[61978]: INFO nova.compute.manager [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Took 10.18 seconds to spawn the instance on the hypervisor. 
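(The Rename_Task, PowerOnVM_Task and CreateVM_Task records above all follow oslo.vmware's invoke-and-poll convention: the driver submits an asynchronous vCenter method, receives a Task reference back — the value = "task-..." block printed by wait_for_task — and then blocks while _poll_task emits the "progress is N%" lines until the task completes. A minimal sketch of that calling pattern follows; the connection parameters and vm_ref are placeholders for illustration, not values taken from this log.)

    from oslo_vmware import api as vmware_api

    # Placeholder connection parameters (illustrative only).
    session = vmware_api.VMwareAPISession(
        'vcenter.example.test',         # host
        'administrator@vsphere.local',  # server_username
        'secret',                       # server_password
        10,                             # api_retry_count
        0.5)                            # task_poll_interval

    vm_ref = ...  # managed object reference of the VM being powered on

    # Submit the asynchronous vCenter call; the return value is the Task
    # reference that shows up in the log as value = "task-NNNNNNN".
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task() polls the task at task_poll_interval (producing the
    # repeated "progress is N%" lines) and returns the task info once
    # vCenter reports success, raising an oslo_vmware exception otherwise.
    task_info = session.wait_for_task(task)
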
[ 1020.223602] env[61978]: DEBUG nova.compute.manager [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1020.225169] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5ec064-97e7-4bca-a031-4c2ab183e498 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.358910] env[61978]: DEBUG nova.compute.manager [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1020.363033] env[61978]: DEBUG nova.network.neutron [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Updating instance_info_cache with network_info: [{"id": "34e47e3b-49fc-4498-b258-cf27c276e3ac", "address": "fa:16:3e:6b:36:5c", "network": {"id": "f5fa8c48-5b4f-4e9a-a2c2-4bbe15fe743e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1900032982-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b106bb3dbeb4bc9a4fc832c860a559d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34e47e3b-49", "ovs_interfaceid": "34e47e3b-49fc-4498-b258-cf27c276e3ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.416303] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528b888b-0e11-4352-2492-4a966f6c8cf8, 'name': SearchDatastore_Task, 'duration_secs': 0.01573} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.416813] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.417068] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1020.417327] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1020.417480] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.417663] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1020.418291] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d82b40c3-22ea-4a8e-b928-e1e6e8369f70 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.432839] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1020.433096] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1020.437075] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e17653ff-7fd3-40c4-b810-5c2a5724da68 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.444855] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1020.444855] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c7ed57-e099-d7fb-531f-d151c18ef04d" [ 1020.444855] env[61978]: _type = "Task" [ 1020.444855] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.460631] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c7ed57-e099-d7fb-531f-d151c18ef04d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.535465] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a5fd0c6-d611-42a7-8740-ee6d50f4131a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.549114] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4951764-ab75-45b7-adb1-913bcc46a5e7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.589098] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062aaa30-cbf9-4efc-9d17-ac6addd85cc6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.603248] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027e7ef9-ecfa-4e4f-93b0-a7c6cf7db999 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.620579] env[61978]: DEBUG nova.compute.provider_tree [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1020.747036] env[61978]: INFO nova.compute.manager [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Took 40.34 seconds to build instance. 
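(The recurring 'Acquiring lock "X" by "Y"' / 'Lock "X" acquired ... waited Ns' / 'Lock "X" "released" ... held Ns' triplets throughout this section are emitted by oslo.concurrency's lockutils wrapper (the "inner" frames in lockutils.py). A minimal sketch of the two forms that produce those messages; the lock names and bodies are illustrative, not copied from Nova source.)

    from oslo_concurrency import lockutils

    # Decorator form: serializes callers on the named lock. The wrapper logs
    # the "Acquiring lock ... / acquired ... waited / released ... held"
    # messages seen throughout this log.
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        ...  # critical section: one resource claim at a time

    # Context-manager form, the style used for per-object locks such as
    # "refresh_cache-<instance-uuid>" or the datastore image-cache path.
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        ...  # refresh the network info cache while holding the lock
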
[ 1020.868153] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Releasing lock "refresh_cache-dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.868462] env[61978]: DEBUG nova.compute.manager [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Instance network_info: |[{"id": "34e47e3b-49fc-4498-b258-cf27c276e3ac", "address": "fa:16:3e:6b:36:5c", "network": {"id": "f5fa8c48-5b4f-4e9a-a2c2-4bbe15fe743e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1900032982-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b106bb3dbeb4bc9a4fc832c860a559d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34e47e3b-49", "ovs_interfaceid": "34e47e3b-49fc-4498-b258-cf27c276e3ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1020.870792] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:36:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31e77685-b4dd-4810-80ef-24115ea9ea62', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '34e47e3b-49fc-4498-b258-cf27c276e3ac', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1020.878347] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Creating folder: Project (5b106bb3dbeb4bc9a4fc832c860a559d). Parent ref: group-v295764. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1020.878950] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c08bb7a-8b33-438d-9e9f-32871f32ce86 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.891203] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.893118] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Created folder: Project (5b106bb3dbeb4bc9a4fc832c860a559d) in parent group-v295764. [ 1020.893342] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Creating folder: Instances. Parent ref: group-v295913. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1020.893625] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb36eea1-02e8-4132-9fce-336a1fc31eb4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.904768] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Created folder: Instances in parent group-v295913. [ 1020.905067] env[61978]: DEBUG oslo.service.loopingcall [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1020.905300] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1020.905544] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-70bde0c4-014d-45ba-91b4-0d4c7c3d88ee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.928917] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1020.928917] env[61978]: value = "task-1395115" [ 1020.928917] env[61978]: _type = "Task" [ 1020.928917] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.940457] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395115, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.957668] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c7ed57-e099-d7fb-531f-d151c18ef04d, 'name': SearchDatastore_Task, 'duration_secs': 0.017096} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.958973] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10d8a98d-e282-4b39-a8f7-162c567c479d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.966632] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1020.966632] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f2e242-6628-4079-df7b-5da6b8d97fcd" [ 1020.966632] env[61978]: _type = "Task" [ 1020.966632] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.978110] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f2e242-6628-4079-df7b-5da6b8d97fcd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.041471] env[61978]: DEBUG nova.compute.manager [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1021.124313] env[61978]: DEBUG nova.scheduler.client.report [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1021.249058] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d640097-2a84-44d8-a5cc-875e83a6cf82 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "0d48ae5d-7cc8-42b3-a993-44636e9cb171" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.561s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.441302] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395115, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.480060] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f2e242-6628-4079-df7b-5da6b8d97fcd, 'name': SearchDatastore_Task, 'duration_secs': 0.019147} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.480914] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1021.480914] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 7e71c8de-1f94-4161-8ad8-a67792c5ce24/7e71c8de-1f94-4161-8ad8-a67792c5ce24.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1021.480914] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5124cef-d0f1-433e-872e-139af918e48b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.491441] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1021.491441] env[61978]: value = "task-1395116" [ 1021.491441] env[61978]: _type = "Task" [ 1021.491441] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.504119] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395116, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.631511] env[61978]: DEBUG oslo_concurrency.lockutils [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.629s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.631635] env[61978]: INFO nova.compute.manager [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Migrating [ 1021.639160] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.617s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.640722] env[61978]: INFO nova.compute.claims [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1021.685814] env[61978]: DEBUG nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1021.686183] env[61978]: DEBUG nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1021.686423] env[61978]: DEBUG nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1021.686632] env[61978]: DEBUG nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1021.686842] env[61978]: DEBUG 
nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1021.687088] env[61978]: DEBUG nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1021.687360] env[61978]: DEBUG nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1021.687580] env[61978]: DEBUG nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1021.687846] env[61978]: DEBUG nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1021.688057] env[61978]: DEBUG nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1021.688323] env[61978]: DEBUG nova.virt.hardware [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1021.696628] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764d45d0-03f3-43f9-a3ab-b9123ab6060e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.707760] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0a5696-f176-4530-a00f-85c1b2c9c69a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.726471] env[61978]: DEBUG nova.network.neutron [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Successfully updated port: a410cd2b-4149-421b-8f8e-287f5927da94 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1021.732780] env[61978]: DEBUG oslo_vmware.rw_handles [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] 
Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521bec9d-fd5a-3cf0-0877-9018c24e20c6/disk-0.vmdk. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1021.733898] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff73eaee-5c9e-4809-a4de-4f818a260b02 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.742162] env[61978]: DEBUG oslo_vmware.rw_handles [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521bec9d-fd5a-3cf0-0877-9018c24e20c6/disk-0.vmdk is in state: ready. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1021.742162] env[61978]: ERROR oslo_vmware.rw_handles [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521bec9d-fd5a-3cf0-0877-9018c24e20c6/disk-0.vmdk due to incomplete transfer. [ 1021.742448] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-16ca140c-36b6-4836-ab4c-2da7b63ae847 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.751935] env[61978]: DEBUG oslo_vmware.rw_handles [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521bec9d-fd5a-3cf0-0877-9018c24e20c6/disk-0.vmdk. {{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1021.752373] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Uploaded image f2517d29-6937-411e-90af-450d799a9c72 to the Glance image server {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1021.754066] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Destroying the VM {{(pid=61978) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1021.754349] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e0718902-686a-4176-b89c-6c9a0b6bcad3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.763430] env[61978]: DEBUG oslo_vmware.api [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1021.763430] env[61978]: value = "task-1395117" [ 1021.763430] env[61978]: _type = "Task" [ 1021.763430] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.779583] env[61978]: DEBUG oslo_vmware.api [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395117, 'name': Destroy_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.942715] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395115, 'name': CreateVM_Task, 'duration_secs': 0.520447} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.943080] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1021.943794] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1021.944099] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.944351] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1021.944659] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc7df1ac-7dd9-46c6-b253-2ac08495fa46 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.951930] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1021.951930] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5265141d-d329-ca04-0bed-4b81910bbbd9" [ 1021.951930] env[61978]: _type = "Task" [ 1021.951930] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.961602] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5265141d-d329-ca04-0bed-4b81910bbbd9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.004446] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395116, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.157197] env[61978]: DEBUG oslo_concurrency.lockutils [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "refresh_cache-f930ab49-c215-4b2e-92b1-21c0d52a70eb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1022.160179] env[61978]: DEBUG oslo_concurrency.lockutils [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquired lock "refresh_cache-f930ab49-c215-4b2e-92b1-21c0d52a70eb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.160179] env[61978]: DEBUG nova.network.neutron [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1022.230217] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "refresh_cache-adf25af8-28c4-444e-b849-88d643f57dcf" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1022.230409] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquired lock "refresh_cache-adf25af8-28c4-444e-b849-88d643f57dcf" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.231626] env[61978]: DEBUG nova.network.neutron [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1022.277621] env[61978]: DEBUG oslo_vmware.api [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395117, 'name': Destroy_Task} progress is 33%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.281322] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "3a30ecc4-455f-49cf-98e8-d38be6a1c5a5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.281884] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "3a30ecc4-455f-49cf-98e8-d38be6a1c5a5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.282207] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "3a30ecc4-455f-49cf-98e8-d38be6a1c5a5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.282428] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "3a30ecc4-455f-49cf-98e8-d38be6a1c5a5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.282744] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "3a30ecc4-455f-49cf-98e8-d38be6a1c5a5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.285217] env[61978]: INFO nova.compute.manager [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Terminating instance [ 1022.287818] env[61978]: DEBUG nova.compute.manager [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1022.288039] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1022.289122] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba4702a-0522-4374-b0a6-3ef9c4a5de32 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.298941] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1022.299292] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a91815fc-6f1e-4091-8372-c6151f92e7b6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.309140] env[61978]: DEBUG oslo_vmware.api [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 1022.309140] env[61978]: value = "task-1395118" [ 1022.309140] env[61978]: _type = "Task" [ 1022.309140] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.321069] env[61978]: DEBUG oslo_vmware.api [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1395118, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.390149] env[61978]: DEBUG nova.compute.manager [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Received event network-changed-7f676fff-3b85-4a9d-b8f4-68c0ea6eda7f {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1022.390149] env[61978]: DEBUG nova.compute.manager [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Refreshing instance network info cache due to event network-changed-7f676fff-3b85-4a9d-b8f4-68c0ea6eda7f. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1022.390406] env[61978]: DEBUG oslo_concurrency.lockutils [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] Acquiring lock "refresh_cache-7e71c8de-1f94-4161-8ad8-a67792c5ce24" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1022.390805] env[61978]: DEBUG oslo_concurrency.lockutils [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] Acquired lock "refresh_cache-7e71c8de-1f94-4161-8ad8-a67792c5ce24" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.392336] env[61978]: DEBUG nova.network.neutron [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Refreshing network info cache for port 7f676fff-3b85-4a9d-b8f4-68c0ea6eda7f {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1022.471931] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5265141d-d329-ca04-0bed-4b81910bbbd9, 'name': SearchDatastore_Task, 'duration_secs': 0.067389} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.472312] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1022.472575] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1022.472861] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1022.473029] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.474314] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1022.474701] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cd057f24-239f-475d-81e4-fbfd01768237 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.488498] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1022.488498] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1022.489181] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f922b49-7f67-426d-8917-ad377a445469 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.498199] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1022.498199] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5282398e-01b8-7d4d-c3e2-4e2987cdd20b" [ 1022.498199] env[61978]: _type = "Task" [ 1022.498199] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.507144] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395116, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.763305} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.509022] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 7e71c8de-1f94-4161-8ad8-a67792c5ce24/7e71c8de-1f94-4161-8ad8-a67792c5ce24.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1022.509022] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1022.509022] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-065e54e3-3e4b-4528-bbf5-0d48c5d4abea {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.514526] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5282398e-01b8-7d4d-c3e2-4e2987cdd20b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.522574] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1022.522574] env[61978]: value = "task-1395119" [ 1022.522574] env[61978]: _type = "Task" [ 1022.522574] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.551213] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395119, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.777360] env[61978]: DEBUG oslo_vmware.api [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395117, 'name': Destroy_Task, 'duration_secs': 0.78969} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.777787] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Destroyed the VM [ 1022.777892] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Deleting Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1022.778230] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a8bcae29-0c3d-4179-9366-d31c18a70a1d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.783493] env[61978]: DEBUG nova.network.neutron [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1022.795467] env[61978]: DEBUG oslo_vmware.api [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1022.795467] env[61978]: value = "task-1395120" [ 1022.795467] env[61978]: _type = "Task" [ 1022.795467] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.809964] env[61978]: DEBUG oslo_vmware.api [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395120, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.823954] env[61978]: DEBUG oslo_vmware.api [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1395118, 'name': PowerOffVM_Task, 'duration_secs': 0.334236} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.832474] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1022.832685] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1022.833024] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0734147f-8647-4a48-9fd5-f2a3a60469f9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.931484] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1022.931705] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1022.931944] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Deleting the datastore file [datastore1] 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1022.932387] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05371b1b-a7d4-4b2c-9846-a1ee280bb844 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.947658] env[61978]: DEBUG oslo_vmware.api [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 1022.947658] env[61978]: value = "task-1395122" [ 1022.947658] env[61978]: _type = "Task" [ 1022.947658] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.961699] env[61978]: DEBUG oslo_vmware.api [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1395122, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.015939] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5282398e-01b8-7d4d-c3e2-4e2987cdd20b, 'name': SearchDatastore_Task, 'duration_secs': 0.028003} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.016254] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e65cebc-3a71-41d1-8c11-18c745305098 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.024884] env[61978]: DEBUG nova.compute.manager [req-c90b3a3f-b389-4fa0-850c-1b021f36d8d1 req-5d3e3865-6959-4e96-9e88-1bafbc1c3c58 service nova] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Received event network-changed-6324dacc-b741-4de5-8ded-34326888d25f {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1023.025162] env[61978]: DEBUG nova.compute.manager [req-c90b3a3f-b389-4fa0-850c-1b021f36d8d1 req-5d3e3865-6959-4e96-9e88-1bafbc1c3c58 service nova] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Refreshing instance network info cache due to event network-changed-6324dacc-b741-4de5-8ded-34326888d25f. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1023.025397] env[61978]: DEBUG oslo_concurrency.lockutils [req-c90b3a3f-b389-4fa0-850c-1b021f36d8d1 req-5d3e3865-6959-4e96-9e88-1bafbc1c3c58 service nova] Acquiring lock "refresh_cache-4c7053ee-7c44-49ee-8d30-bf14686c6b1c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1023.025550] env[61978]: DEBUG oslo_concurrency.lockutils [req-c90b3a3f-b389-4fa0-850c-1b021f36d8d1 req-5d3e3865-6959-4e96-9e88-1bafbc1c3c58 service nova] Acquired lock "refresh_cache-4c7053ee-7c44-49ee-8d30-bf14686c6b1c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.025738] env[61978]: DEBUG nova.network.neutron [req-c90b3a3f-b389-4fa0-850c-1b021f36d8d1 req-5d3e3865-6959-4e96-9e88-1bafbc1c3c58 service nova] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Refreshing network info cache for port 6324dacc-b741-4de5-8ded-34326888d25f {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1023.032496] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1023.032496] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]529368b6-cc1a-4881-92ed-b64fde50bac6" [ 1023.032496] env[61978]: _type = "Task" [ 1023.032496] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.045119] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395119, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081427} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.048485] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1023.050140] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa2187a-a3c6-4305-8830-7bbf75443050 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.057583] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]529368b6-cc1a-4881-92ed-b64fde50bac6, 'name': SearchDatastore_Task, 'duration_secs': 0.013293} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.060993] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1023.061342] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56/dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1023.061832] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5058cdf-69da-4d8e-8803-ab3c199c995c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.082760] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] 7e71c8de-1f94-4161-8ad8-a67792c5ce24/7e71c8de-1f94-4161-8ad8-a67792c5ce24.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1023.086748] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5964c0d-0df2-407d-bdce-a1b86e86488d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.108555] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1023.108555] env[61978]: value = "task-1395123" [ 1023.108555] 
env[61978]: _type = "Task" [ 1023.108555] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.117038] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1023.117038] env[61978]: value = "task-1395124" [ 1023.117038] env[61978]: _type = "Task" [ 1023.117038] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.120995] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395123, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.133591] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395124, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.158479] env[61978]: DEBUG oslo_concurrency.lockutils [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Acquiring lock "aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.158771] env[61978]: DEBUG oslo_concurrency.lockutils [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Lock "aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.158995] env[61978]: DEBUG oslo_concurrency.lockutils [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Acquiring lock "aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.159211] env[61978]: DEBUG oslo_concurrency.lockutils [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Lock "aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.159390] env[61978]: DEBUG oslo_concurrency.lockutils [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Lock "aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.162045] env[61978]: INFO nova.compute.manager [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Terminating instance [ 1023.164847] env[61978]: DEBUG nova.compute.manager [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1023.165163] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1023.166057] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c083b2b9-2ee3-4c47-a249-098a9f2207ab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.178036] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1023.178299] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-81651cf0-630b-4e94-b742-0edd9302b1a5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.186877] env[61978]: DEBUG oslo_vmware.api [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Waiting for the task: (returnval){ [ 1023.186877] env[61978]: value = "task-1395125" [ 1023.186877] env[61978]: _type = "Task" [ 1023.186877] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.199161] env[61978]: DEBUG oslo_vmware.api [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': task-1395125, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.226250] env[61978]: DEBUG nova.network.neutron [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Updating instance_info_cache with network_info: [{"id": "a410cd2b-4149-421b-8f8e-287f5927da94", "address": "fa:16:3e:66:00:e6", "network": {"id": "f5fa8c48-5b4f-4e9a-a2c2-4bbe15fe743e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1900032982-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b106bb3dbeb4bc9a4fc832c860a559d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa410cd2b-41", "ovs_interfaceid": "a410cd2b-4149-421b-8f8e-287f5927da94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.243021] env[61978]: DEBUG nova.network.neutron [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Updating instance_info_cache with network_info: [{"id": "fcd64700-31ef-4310-8986-b22e515b1c55", "address": "fa:16:3e:a7:22:5a", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd64700-31", "ovs_interfaceid": "fcd64700-31ef-4310-8986-b22e515b1c55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.309130] env[61978]: DEBUG oslo_vmware.api [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395120, 'name': RemoveSnapshot_Task} progress is 17%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.332030] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf05d97-728c-4736-8196-1d4e0a63ae2d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.342340] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3aa59de-75f8-4c99-9c45-e5a8682856a5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.350937] env[61978]: DEBUG nova.network.neutron [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Updated VIF entry in instance network info cache for port 7f676fff-3b85-4a9d-b8f4-68c0ea6eda7f. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1023.351370] env[61978]: DEBUG nova.network.neutron [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Updating instance_info_cache with network_info: [{"id": "7f676fff-3b85-4a9d-b8f4-68c0ea6eda7f", "address": "fa:16:3e:ca:10:89", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f676fff-3b", "ovs_interfaceid": "7f676fff-3b85-4a9d-b8f4-68c0ea6eda7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.382108] env[61978]: DEBUG oslo_concurrency.lockutils [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] Releasing lock "refresh_cache-7e71c8de-1f94-4161-8ad8-a67792c5ce24" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1023.382394] env[61978]: DEBUG nova.compute.manager [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Received event network-vif-plugged-34e47e3b-49fc-4498-b258-cf27c276e3ac {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1023.382621] env[61978]: DEBUG oslo_concurrency.lockutils [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] Acquiring lock "dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.382881] env[61978]: DEBUG oslo_concurrency.lockutils [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] Lock "dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.383056] env[61978]: DEBUG oslo_concurrency.lockutils [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] Lock "dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.383260] env[61978]: DEBUG nova.compute.manager [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] No waiting events found dispatching network-vif-plugged-34e47e3b-49fc-4498-b258-cf27c276e3ac {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1023.383445] env[61978]: WARNING nova.compute.manager [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Received unexpected event network-vif-plugged-34e47e3b-49fc-4498-b258-cf27c276e3ac for instance with vm_state building and task_state spawning. [ 1023.383642] env[61978]: DEBUG nova.compute.manager [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Received event network-changed-34e47e3b-49fc-4498-b258-cf27c276e3ac {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1023.383778] env[61978]: DEBUG nova.compute.manager [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Refreshing instance network info cache due to event network-changed-34e47e3b-49fc-4498-b258-cf27c276e3ac. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1023.384049] env[61978]: DEBUG oslo_concurrency.lockutils [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] Acquiring lock "refresh_cache-dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1023.385264] env[61978]: DEBUG oslo_concurrency.lockutils [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] Acquired lock "refresh_cache-dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.385264] env[61978]: DEBUG nova.network.neutron [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Refreshing network info cache for port 34e47e3b-49fc-4498-b258-cf27c276e3ac {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1023.386515] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08360217-0991-4292-8b76-9e0ff0aa0a31 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.398355] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304af505-8c45-4ae3-a491-02fa0d689f8d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.418661] env[61978]: DEBUG nova.compute.provider_tree [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.455399] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc3a4b5a-2455-42e8-af90-9c74d66369ea {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.464462] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-310e3dac-3416-4440-a6ec-a7e65e17ce16 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Suspending the VM {{(pid=61978) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1023.469162] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-f6cc6fea-98bf-4d0f-867b-4983fc620dc6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.471490] env[61978]: DEBUG oslo_vmware.api [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1395122, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.252193} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.471872] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1023.472187] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1023.472413] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1023.472592] env[61978]: INFO nova.compute.manager [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1023.472869] env[61978]: DEBUG oslo.service.loopingcall [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1023.473626] env[61978]: DEBUG nova.compute.manager [-] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1023.473722] env[61978]: DEBUG nova.network.neutron [-] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1023.481441] env[61978]: DEBUG oslo_vmware.api [None req-310e3dac-3416-4440-a6ec-a7e65e17ce16 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1023.481441] env[61978]: value = "task-1395126" [ 1023.481441] env[61978]: _type = "Task" [ 1023.481441] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.493975] env[61978]: DEBUG oslo_vmware.api [None req-310e3dac-3416-4440-a6ec-a7e65e17ce16 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395126, 'name': SuspendVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.628808] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395123, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.637396] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395124, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.704846] env[61978]: DEBUG oslo_vmware.api [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': task-1395125, 'name': PowerOffVM_Task, 'duration_secs': 0.234919} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.705674] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1023.707016] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1023.707016] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53c87230-dafb-4ea7-a62e-4cb523e875e8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.726925] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Releasing lock "refresh_cache-adf25af8-28c4-444e-b849-88d643f57dcf" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1023.727116] env[61978]: DEBUG nova.compute.manager [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Instance network_info: |[{"id": "a410cd2b-4149-421b-8f8e-287f5927da94", "address": "fa:16:3e:66:00:e6", "network": {"id": "f5fa8c48-5b4f-4e9a-a2c2-4bbe15fe743e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1900032982-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b106bb3dbeb4bc9a4fc832c860a559d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa410cd2b-41", "ovs_interfaceid": "a410cd2b-4149-421b-8f8e-287f5927da94", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1023.727673] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:00:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31e77685-b4dd-4810-80ef-24115ea9ea62', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a410cd2b-4149-421b-8f8e-287f5927da94', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1023.738032] env[61978]: DEBUG oslo.service.loopingcall [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1023.738339] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1023.738594] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-89484517-65bc-4c9f-b8a6-bc85cbbb50f6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.759586] env[61978]: DEBUG oslo_concurrency.lockutils [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Releasing lock "refresh_cache-f930ab49-c215-4b2e-92b1-21c0d52a70eb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1023.770212] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1023.770212] env[61978]: value = "task-1395128" [ 1023.770212] env[61978]: _type = "Task" [ 1023.770212] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.780644] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395128, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.805899] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1023.806165] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1023.806353] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Deleting the datastore file [datastore1] aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1023.806638] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-456ed8a8-e29b-4c1d-9df6-12997a45a9ab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.816268] env[61978]: DEBUG oslo_vmware.api [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395120, 'name': RemoveSnapshot_Task, 'duration_secs': 1.020022} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.820011] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Deleted Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1023.820011] env[61978]: INFO nova.compute.manager [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Took 15.11 seconds to snapshot the instance on the hypervisor. [ 1023.822520] env[61978]: DEBUG oslo_vmware.api [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Waiting for the task: (returnval){ [ 1023.822520] env[61978]: value = "task-1395129" [ 1023.822520] env[61978]: _type = "Task" [ 1023.822520] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.839063] env[61978]: DEBUG oslo_vmware.api [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': task-1395129, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.926595] env[61978]: DEBUG nova.scheduler.client.report [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1023.995403] env[61978]: DEBUG oslo_vmware.api [None req-310e3dac-3416-4440-a6ec-a7e65e17ce16 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395126, 'name': SuspendVM_Task} progress is 70%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.122670] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395123, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.694475} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.125987] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56/dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1024.126513] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1024.126967] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e453fa2c-1ee1-4eda-9664-efa0ddbc2dd4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.136693] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395124, 'name': ReconfigVM_Task, 'duration_secs': 0.613786} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.138486] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Reconfigured VM instance instance-00000034 to attach disk [datastore2] 7e71c8de-1f94-4161-8ad8-a67792c5ce24/7e71c8de-1f94-4161-8ad8-a67792c5ce24.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1024.140129] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1024.140129] env[61978]: value = "task-1395130" [ 1024.140129] env[61978]: _type = "Task" [ 1024.140129] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.140129] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5f14a903-d6a7-4bb7-84ad-16851c038815 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.152638] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395130, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.154282] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1024.154282] env[61978]: value = "task-1395131" [ 1024.154282] env[61978]: _type = "Task" [ 1024.154282] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.165239] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395131, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.255740] env[61978]: DEBUG nova.network.neutron [req-c90b3a3f-b389-4fa0-850c-1b021f36d8d1 req-5d3e3865-6959-4e96-9e88-1bafbc1c3c58 service nova] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Updated VIF entry in instance network info cache for port 6324dacc-b741-4de5-8ded-34326888d25f. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1024.256123] env[61978]: DEBUG nova.network.neutron [req-c90b3a3f-b389-4fa0-850c-1b021f36d8d1 req-5d3e3865-6959-4e96-9e88-1bafbc1c3c58 service nova] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Updating instance_info_cache with network_info: [{"id": "6324dacc-b741-4de5-8ded-34326888d25f", "address": "fa:16:3e:36:fb:4d", "network": {"id": "c16671c6-39f3-456e-b159-3af6a9553564", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1089542181-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "add5612301884f668bbe80681629e8d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6324dacc-b7", "ovs_interfaceid": "6324dacc-b741-4de5-8ded-34326888d25f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.279104] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395128, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.342931] env[61978]: DEBUG oslo_vmware.api [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': task-1395129, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.369151] env[61978]: DEBUG nova.network.neutron [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Updated VIF entry in instance network info cache for port 34e47e3b-49fc-4498-b258-cf27c276e3ac. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1024.369535] env[61978]: DEBUG nova.network.neutron [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Updating instance_info_cache with network_info: [{"id": "34e47e3b-49fc-4498-b258-cf27c276e3ac", "address": "fa:16:3e:6b:36:5c", "network": {"id": "f5fa8c48-5b4f-4e9a-a2c2-4bbe15fe743e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1900032982-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b106bb3dbeb4bc9a4fc832c860a559d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34e47e3b-49", "ovs_interfaceid": "34e47e3b-49fc-4498-b258-cf27c276e3ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.400674] env[61978]: DEBUG nova.compute.manager [None req-665a8dde-1cad-4b23-9946-539ac120e23e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Found 2 images (rotation: 2) {{(pid=61978) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 1024.426560] env[61978]: DEBUG nova.compute.manager [req-e8b36ac4-ab2a-4dbe-b184-2252979286fa req-06636298-2126-4657-9d18-02cc51ef051f service nova] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Received event network-vif-plugged-a410cd2b-4149-421b-8f8e-287f5927da94 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1024.426769] env[61978]: DEBUG oslo_concurrency.lockutils [req-e8b36ac4-ab2a-4dbe-b184-2252979286fa req-06636298-2126-4657-9d18-02cc51ef051f service nova] Acquiring lock "adf25af8-28c4-444e-b849-88d643f57dcf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.426988] env[61978]: DEBUG oslo_concurrency.lockutils [req-e8b36ac4-ab2a-4dbe-b184-2252979286fa req-06636298-2126-4657-9d18-02cc51ef051f service nova] Lock "adf25af8-28c4-444e-b849-88d643f57dcf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.427194] env[61978]: DEBUG oslo_concurrency.lockutils [req-e8b36ac4-ab2a-4dbe-b184-2252979286fa req-06636298-2126-4657-9d18-02cc51ef051f service nova] Lock "adf25af8-28c4-444e-b849-88d643f57dcf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1024.427368] env[61978]: DEBUG nova.compute.manager [req-e8b36ac4-ab2a-4dbe-b184-2252979286fa req-06636298-2126-4657-9d18-02cc51ef051f service nova] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] No waiting events found dispatching network-vif-plugged-a410cd2b-4149-421b-8f8e-287f5927da94 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1024.427920] env[61978]: WARNING nova.compute.manager [req-e8b36ac4-ab2a-4dbe-b184-2252979286fa req-06636298-2126-4657-9d18-02cc51ef051f service nova] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Received unexpected event network-vif-plugged-a410cd2b-4149-421b-8f8e-287f5927da94 for instance with vm_state building and task_state spawning. [ 1024.427920] env[61978]: DEBUG nova.compute.manager [req-e8b36ac4-ab2a-4dbe-b184-2252979286fa req-06636298-2126-4657-9d18-02cc51ef051f service nova] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Received event network-changed-a410cd2b-4149-421b-8f8e-287f5927da94 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1024.427920] env[61978]: DEBUG nova.compute.manager [req-e8b36ac4-ab2a-4dbe-b184-2252979286fa req-06636298-2126-4657-9d18-02cc51ef051f service nova] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Refreshing instance network info cache due to event network-changed-a410cd2b-4149-421b-8f8e-287f5927da94. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1024.428084] env[61978]: DEBUG oslo_concurrency.lockutils [req-e8b36ac4-ab2a-4dbe-b184-2252979286fa req-06636298-2126-4657-9d18-02cc51ef051f service nova] Acquiring lock "refresh_cache-adf25af8-28c4-444e-b849-88d643f57dcf" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.428194] env[61978]: DEBUG oslo_concurrency.lockutils [req-e8b36ac4-ab2a-4dbe-b184-2252979286fa req-06636298-2126-4657-9d18-02cc51ef051f service nova] Acquired lock "refresh_cache-adf25af8-28c4-444e-b849-88d643f57dcf" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.428354] env[61978]: DEBUG nova.network.neutron [req-e8b36ac4-ab2a-4dbe-b184-2252979286fa req-06636298-2126-4657-9d18-02cc51ef051f service nova] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Refreshing network info cache for port a410cd2b-4149-421b-8f8e-287f5927da94 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1024.431576] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.792s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.433102] env[61978]: DEBUG nova.compute.manager [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1024.434293] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 23.258s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.434468] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.434615] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1024.434901] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 19.128s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.435194] env[61978]: DEBUG nova.objects.instance [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61978) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1024.438321] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4364d9b-8ff2-4b83-b356-73c628c79ebe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.448053] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a579fd0-e4d7-4962-bed5-b941a307b390 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.468206] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028e0c75-49e2-462a-a91b-140c183453b5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.478834] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63326e2e-1b4a-4969-b613-951fc340a007 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.518037] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178137MB free_disk=184GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1024.518037] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.520658] env[61978]: DEBUG oslo_vmware.api [None req-310e3dac-3416-4440-a6ec-a7e65e17ce16 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395126, 'name': SuspendVM_Task} progress is 70%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.656911] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395130, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075505} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.661302] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1024.661910] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feae52f9-e3a0-42b0-bfe6-98fdbd864a6d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.674284] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395131, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.697711] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56/dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1024.698072] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3cb9118d-8fe8-4c24-87d8-b4b20064dc90 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.715084] env[61978]: DEBUG nova.network.neutron [-] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.720662] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1024.720662] env[61978]: value = "task-1395132" [ 1024.720662] env[61978]: _type = "Task" [ 1024.720662] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.729739] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395132, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.759474] env[61978]: DEBUG oslo_concurrency.lockutils [req-c90b3a3f-b389-4fa0-850c-1b021f36d8d1 req-5d3e3865-6959-4e96-9e88-1bafbc1c3c58 service nova] Releasing lock "refresh_cache-4c7053ee-7c44-49ee-8d30-bf14686c6b1c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.780161] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395128, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.835845] env[61978]: DEBUG oslo_vmware.api [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': task-1395129, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.844692] env[61978]: DEBUG nova.compute.manager [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1024.845598] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04da759-cced-4ec9-8345-7f916e9a6df8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.872011] env[61978]: DEBUG oslo_concurrency.lockutils [req-b2f6c744-7910-486a-bf52-b6d2f690f7ac req-532eed3a-7196-4834-aa72-050467e84c7f service nova] Releasing lock "refresh_cache-dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.940161] env[61978]: DEBUG nova.compute.utils [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1024.942442] env[61978]: DEBUG nova.compute.manager [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1024.942748] env[61978]: DEBUG nova.network.neutron [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1024.996786] env[61978]: DEBUG oslo_vmware.api [None req-310e3dac-3416-4440-a6ec-a7e65e17ce16 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395126, 'name': SuspendVM_Task} progress is 70%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.037939] env[61978]: DEBUG nova.policy [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a2ebd638c24f4a5d959ad19796744b37', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '26cb7552530047c5867347d62195121e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1025.167187] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395131, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.218125] env[61978]: INFO nova.compute.manager [-] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Took 1.74 seconds to deallocate network for instance. [ 1025.246578] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395132, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.280544] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c248ede-1397-4bb6-9a58-2c6fd408820b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.289942] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395128, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.306526] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Updating instance 'f930ab49-c215-4b2e-92b1-21c0d52a70eb' progress to 0 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1025.310790] env[61978]: DEBUG nova.network.neutron [req-e8b36ac4-ab2a-4dbe-b184-2252979286fa req-06636298-2126-4657-9d18-02cc51ef051f service nova] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Updated VIF entry in instance network info cache for port a410cd2b-4149-421b-8f8e-287f5927da94. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1025.311181] env[61978]: DEBUG nova.network.neutron [req-e8b36ac4-ab2a-4dbe-b184-2252979286fa req-06636298-2126-4657-9d18-02cc51ef051f service nova] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Updating instance_info_cache with network_info: [{"id": "a410cd2b-4149-421b-8f8e-287f5927da94", "address": "fa:16:3e:66:00:e6", "network": {"id": "f5fa8c48-5b4f-4e9a-a2c2-4bbe15fe743e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1900032982-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b106bb3dbeb4bc9a4fc832c860a559d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa410cd2b-41", "ovs_interfaceid": "a410cd2b-4149-421b-8f8e-287f5927da94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.339030] env[61978]: DEBUG oslo_vmware.api [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': task-1395129, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.339844] env[61978]: DEBUG nova.compute.manager [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1025.342247] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183902df-26ae-461e-953e-74915b225d49 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.358902] env[61978]: INFO nova.compute.manager [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] instance snapshotting [ 1025.361387] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf792077-0b5b-432c-bb6c-44ca4cbf4747 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.384069] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b77c65-d577-47ad-ace9-98e7309b221a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.454066] env[61978]: DEBUG nova.network.neutron [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Successfully created port: 0ac08d0f-ea95-4b4e-9fad-6e6e819ec94d {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1025.457147] env[61978]: DEBUG nova.compute.manager [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1025.462660] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a492422f-af0d-4587-9a9b-8f2d7a98ad24 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.025s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.462660] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.719s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.462660] env[61978]: DEBUG nova.objects.instance [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Lazy-loading 'resources' on Instance uuid dd686727-fc33-4dc4-b386-aabec27cf215 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1025.496409] env[61978]: DEBUG oslo_vmware.api [None req-310e3dac-3416-4440-a6ec-a7e65e17ce16 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395126, 'name': SuspendVM_Task} progress is 70%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.669179] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395131, 'name': Rename_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.733216] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395132, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.737809] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.743257] env[61978]: DEBUG nova.network.neutron [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Successfully created port: 8664e01e-1422-4709-85a0-c3684ca5733c {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1025.782931] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395128, 'name': CreateVM_Task, 'duration_secs': 1.87642} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.783209] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1025.784051] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1025.784343] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.784639] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1025.784936] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7852018f-3985-4fc0-811a-096fb7713c40 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.790821] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1025.790821] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52dcd736-5d10-81e1-ee50-80e3c5004d4c" [ 1025.790821] env[61978]: _type = "Task" [ 1025.790821] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.800814] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52dcd736-5d10-81e1-ee50-80e3c5004d4c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.815257] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1025.815785] env[61978]: DEBUG oslo_concurrency.lockutils [req-e8b36ac4-ab2a-4dbe-b184-2252979286fa req-06636298-2126-4657-9d18-02cc51ef051f service nova] Releasing lock "refresh_cache-adf25af8-28c4-444e-b849-88d643f57dcf" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1025.816114] env[61978]: DEBUG nova.compute.manager [req-e8b36ac4-ab2a-4dbe-b184-2252979286fa req-06636298-2126-4657-9d18-02cc51ef051f service nova] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Received event network-vif-deleted-f10bef80-f5ec-40ab-bb8e-c1c9973e4e66 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1025.816317] env[61978]: INFO nova.compute.manager [req-e8b36ac4-ab2a-4dbe-b184-2252979286fa req-06636298-2126-4657-9d18-02cc51ef051f service nova] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Neutron deleted interface f10bef80-f5ec-40ab-bb8e-c1c9973e4e66; detaching it from the instance and deleting it from the info cache [ 1025.816532] env[61978]: DEBUG nova.network.neutron [req-e8b36ac4-ab2a-4dbe-b184-2252979286fa req-06636298-2126-4657-9d18-02cc51ef051f service nova] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.818142] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87635956-4392-4781-9074-44a2c834c753 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.828481] env[61978]: DEBUG oslo_vmware.api [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 1025.828481] env[61978]: value = "task-1395133" [ 1025.828481] env[61978]: _type = "Task" [ 1025.828481] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.844248] env[61978]: DEBUG oslo_vmware.api [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395133, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.847233] env[61978]: DEBUG oslo_vmware.api [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Task: {'id': task-1395129, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.925701} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.847578] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1025.847783] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1025.847969] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1025.848170] env[61978]: INFO nova.compute.manager [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Took 2.68 seconds to destroy the instance on the hypervisor. [ 1025.848429] env[61978]: DEBUG oslo.service.loopingcall [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1025.848636] env[61978]: DEBUG nova.compute.manager [-] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1025.848729] env[61978]: DEBUG nova.network.neutron [-] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1025.851890] env[61978]: INFO nova.compute.manager [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] instance snapshotting [ 1025.851890] env[61978]: DEBUG nova.objects.instance [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lazy-loading 'flavor' on Instance uuid c17c986e-c008-4414-8dd1-4ea836458048 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1025.899112] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Creating Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1025.899478] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b2da59b1-14d6-4116-970d-472eacbce4ff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.909592] env[61978]: DEBUG oslo_vmware.api [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 1025.909592] env[61978]: value = "task-1395134" [ 1025.909592] env[61978]: _type = "Task" [ 1025.909592] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.922086] env[61978]: DEBUG oslo_vmware.api [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395134, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.998529] env[61978]: DEBUG oslo_vmware.api [None req-310e3dac-3416-4440-a6ec-a7e65e17ce16 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395126, 'name': SuspendVM_Task, 'duration_secs': 2.421485} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.998779] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-310e3dac-3416-4440-a6ec-a7e65e17ce16 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Suspended the VM {{(pid=61978) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1025.998899] env[61978]: DEBUG nova.compute.manager [None req-310e3dac-3416-4440-a6ec-a7e65e17ce16 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1026.006470] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6399829-310e-460a-93d3-18e1faca3687 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.167944] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395131, 'name': Rename_Task, 'duration_secs': 1.62615} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.168452] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1026.168685] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ce235ee4-404f-439f-9c25-9b29c2e00293 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.175884] env[61978]: DEBUG nova.network.neutron [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Successfully created port: 30edec9f-85c0-41f9-ab16-aea72cc18c06 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1026.182401] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1026.182401] env[61978]: value = "task-1395135" [ 1026.182401] env[61978]: _type = "Task" [ 1026.182401] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.195264] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395135, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.235713] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395132, 'name': ReconfigVM_Task, 'duration_secs': 1.172262} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.236902] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Reconfigured VM instance instance-00000035 to attach disk [datastore2] dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56/dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1026.237779] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5c6a5f85-047f-4593-aec1-301974f23d29 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.246567] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1026.246567] env[61978]: value = "task-1395136" [ 1026.246567] env[61978]: _type = "Task" [ 1026.246567] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.263250] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395136, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.303650] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52dcd736-5d10-81e1-ee50-80e3c5004d4c, 'name': SearchDatastore_Task, 'duration_secs': 0.011584} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.303960] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1026.304242] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1026.304477] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.304599] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.304786] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1026.305152] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b5f527ce-ea57-4602-a807-1439150a0c67 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.315927] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1026.316213] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1026.317244] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1569a698-f4b1-4300-8702-4d3521d9b824 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.321896] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c54d55e-dac4-44c5-ba9d-013d3a78f643 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.325432] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1026.325432] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5200a319-ef24-622b-78b3-fd123ad45cb9" [ 1026.325432] env[61978]: _type = "Task" [ 1026.325432] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.340809] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b749a0d-e2e1-468f-81ec-5222fa2967f7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.361964] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5200a319-ef24-622b-78b3-fd123ad45cb9, 'name': SearchDatastore_Task, 'duration_secs': 0.011436} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.365564] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a94c773-80ca-4e65-b64a-3b1e0e17c58b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.371964] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38d8b71b-24d6-44e7-9330-ab9915a00dc8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.375143] env[61978]: DEBUG oslo_vmware.api [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395133, 'name': PowerOffVM_Task, 'duration_secs': 0.219955} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.376120] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1026.376120] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Updating instance 'f930ab49-c215-4b2e-92b1-21c0d52a70eb' progress to 17 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1026.418196] env[61978]: DEBUG nova.compute.manager [req-e8b36ac4-ab2a-4dbe-b184-2252979286fa req-06636298-2126-4657-9d18-02cc51ef051f service nova] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Detach interface failed, port_id=f10bef80-f5ec-40ab-bb8e-c1c9973e4e66, reason: Instance 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1026.420230] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68eb3931-78bc-4207-b3d5-f390b58e8da4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.426784] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1026.426784] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]522f7932-ae7a-4a14-9ccd-3d831910fbb5" [ 1026.426784] env[61978]: _type = "Task" [ 1026.426784] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.440726] env[61978]: DEBUG oslo_vmware.api [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395134, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.451113] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522f7932-ae7a-4a14-9ccd-3d831910fbb5, 'name': SearchDatastore_Task, 'duration_secs': 0.013175} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.451426] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1026.451697] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] adf25af8-28c4-444e-b849-88d643f57dcf/adf25af8-28c4-444e-b849-88d643f57dcf.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1026.452089] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d8518f84-5fc1-4b45-8a99-4b28b1003225 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.461540] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1026.461540] env[61978]: value = "task-1395137" [ 1026.461540] env[61978]: _type = "Task" [ 1026.461540] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.473209] env[61978]: DEBUG nova.compute.manager [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1026.481208] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395137, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.521537] env[61978]: DEBUG nova.virt.hardware [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1026.521868] env[61978]: DEBUG nova.virt.hardware [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1026.521950] env[61978]: DEBUG nova.virt.hardware [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1026.522141] env[61978]: DEBUG nova.virt.hardware [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1026.522289] env[61978]: DEBUG nova.virt.hardware [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1026.522447] env[61978]: DEBUG nova.virt.hardware [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1026.522659] env[61978]: DEBUG nova.virt.hardware [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1026.522823] env[61978]: DEBUG nova.virt.hardware [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1026.523097] env[61978]: DEBUG nova.virt.hardware [None 
req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1026.523296] env[61978]: DEBUG nova.virt.hardware [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1026.523479] env[61978]: DEBUG nova.virt.hardware [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1026.526399] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe83cc75-0692-4dc7-84ab-edea4192a5d4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.537103] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f5952a-4737-4733-9b20-02fdf95c716c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.543388] env[61978]: DEBUG nova.compute.manager [req-0bbb4198-61b2-49a6-bdc7-812e388e66e0 req-12c6e89d-d1d3-4afa-82e1-cd9c4ff6f0db service nova] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Received event network-vif-deleted-948f3af3-d436-4415-b7d8-edefe3d32c25 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1026.544182] env[61978]: INFO nova.compute.manager [req-0bbb4198-61b2-49a6-bdc7-812e388e66e0 req-12c6e89d-d1d3-4afa-82e1-cd9c4ff6f0db service nova] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Neutron deleted interface 948f3af3-d436-4415-b7d8-edefe3d32c25; detaching it from the instance and deleting it from the info cache [ 1026.544182] env[61978]: DEBUG nova.network.neutron [req-0bbb4198-61b2-49a6-bdc7-812e388e66e0 req-12c6e89d-d1d3-4afa-82e1-cd9c4ff6f0db service nova] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.613728] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260b5c1f-38fe-4044-90ae-ebcf12ee8d0f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.622836] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbbc7690-19be-4164-be98-b4a47f9a0ad3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.654513] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acebb312-0c0f-4721-94f4-83ab774daae0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.665087] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11a7a10-563d-4609-b750-8389f5ee12b2 {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.680846] env[61978]: DEBUG nova.compute.provider_tree [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1026.692955] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395135, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.760438] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395136, 'name': Rename_Task, 'duration_secs': 0.183355} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.760438] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1026.760806] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-723cbb71-3d0e-4732-b230-235b053acb25 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.770884] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1026.770884] env[61978]: value = "task-1395138" [ 1026.770884] env[61978]: _type = "Task" [ 1026.770884] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.780932] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395138, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.835964] env[61978]: DEBUG nova.network.neutron [-] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.925202] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:05Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1026.925826] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1026.925826] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1026.925943] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1026.926366] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1026.926456] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1026.927032] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1026.927032] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
1026.927248] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1026.927383] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1026.927606] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1026.936011] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ddd0d984-9b8a-41a0-bab7-969690a818f4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.950725] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Creating Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1026.951085] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-892f6810-40ff-4b13-b017-cfe2b6f88415 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.958096] env[61978]: DEBUG oslo_vmware.api [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395134, 'name': CreateSnapshot_Task, 'duration_secs': 0.580174} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.958841] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Created Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1026.959823] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4359c8cf-c73e-4ad0-8ad7-77cb80f75bff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.964132] env[61978]: DEBUG oslo_vmware.api [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 1026.964132] env[61978]: value = "task-1395139" [ 1026.964132] env[61978]: _type = "Task" [ 1026.964132] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.966272] env[61978]: DEBUG oslo_vmware.api [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1026.966272] env[61978]: value = "task-1395140" [ 1026.966272] env[61978]: _type = "Task" [ 1026.966272] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.991447] env[61978]: DEBUG oslo_vmware.api [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395139, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.998682] env[61978]: DEBUG oslo_vmware.api [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395140, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.998988] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395137, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.047248] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4430cb9c-0fb5-4631-aba7-fe00ad347a16 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.058967] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd294bf4-a5cd-4f59-8c94-7a9708a53fe6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.102502] env[61978]: DEBUG nova.compute.manager [req-0bbb4198-61b2-49a6-bdc7-812e388e66e0 req-12c6e89d-d1d3-4afa-82e1-cd9c4ff6f0db service nova] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Detach interface failed, port_id=948f3af3-d436-4415-b7d8-edefe3d32c25, reason: Instance aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba could not be found. 
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1027.184491] env[61978]: DEBUG nova.scheduler.client.report [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1027.204053] env[61978]: DEBUG oslo_vmware.api [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395135, 'name': PowerOnVM_Task, 'duration_secs': 0.627863} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.205522] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1027.205882] env[61978]: INFO nova.compute.manager [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Took 13.35 seconds to spawn the instance on the hypervisor. [ 1027.206206] env[61978]: DEBUG nova.compute.manager [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1027.207767] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2435108-c2f6-4698-a0a1-f4612f7616b2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.282854] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395138, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.338936] env[61978]: INFO nova.compute.manager [-] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Took 1.49 seconds to deallocate network for instance. [ 1027.480995] env[61978]: DEBUG oslo_vmware.api [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395139, 'name': ReconfigVM_Task, 'duration_secs': 0.330062} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.485025] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Updating instance 'f930ab49-c215-4b2e-92b1-21c0d52a70eb' progress to 33 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1027.496476] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Creating linked-clone VM from snapshot {{(pid=61978) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1027.496926] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395137, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.600154} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.501485] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1bd1c06c-a009-4dab-8ca2-c28d21191f46 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.508449] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] adf25af8-28c4-444e-b849-88d643f57dcf/adf25af8-28c4-444e-b849-88d643f57dcf.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1027.508732] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1027.509058] env[61978]: DEBUG oslo_vmware.api [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395140, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.509316] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7855cb13-b85b-4125-a102-57e84e638935 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.518659] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1027.518659] env[61978]: value = "task-1395141" [ 1027.518659] env[61978]: _type = "Task" [ 1027.518659] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.520324] env[61978]: DEBUG oslo_vmware.api [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 1027.520324] env[61978]: value = "task-1395142" [ 1027.520324] env[61978]: _type = "Task" [ 1027.520324] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.549705] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395141, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.553216] env[61978]: DEBUG oslo_vmware.api [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395142, 'name': CloneVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.696350] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.235s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.699686] env[61978]: DEBUG oslo_concurrency.lockutils [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.411s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.701690] env[61978]: INFO nova.compute.claims [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1027.732022] env[61978]: INFO nova.scheduler.client.report [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Deleted allocations for instance dd686727-fc33-4dc4-b386-aabec27cf215 [ 1027.734860] env[61978]: INFO nova.compute.manager [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Took 37.28 seconds to build instance. [ 1027.782214] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395138, 'name': PowerOnVM_Task, 'duration_secs': 0.837045} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.783175] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1027.783175] env[61978]: INFO nova.compute.manager [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Took 9.32 seconds to spawn the instance on the hypervisor. [ 1027.783175] env[61978]: DEBUG nova.compute.manager [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1027.783767] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9378a840-4087-4c72-8df2-f5523b5c161b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.812106] env[61978]: DEBUG oslo_vmware.rw_handles [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b59d0c-9197-38ab-011d-ab51e257e894/disk-0.vmdk. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1027.813142] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ee0988-115e-4a0a-93fd-995b78ed5108 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.819241] env[61978]: DEBUG nova.network.neutron [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Successfully updated port: 0ac08d0f-ea95-4b4e-9fad-6e6e819ec94d {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1027.824080] env[61978]: DEBUG oslo_vmware.rw_handles [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b59d0c-9197-38ab-011d-ab51e257e894/disk-0.vmdk is in state: ready. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1027.824080] env[61978]: ERROR oslo_vmware.rw_handles [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b59d0c-9197-38ab-011d-ab51e257e894/disk-0.vmdk due to incomplete transfer. 
[ 1027.824080] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-dae8d81a-7b43-4ddf-9a47-ab6d86f97eca {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.833495] env[61978]: DEBUG oslo_vmware.rw_handles [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b59d0c-9197-38ab-011d-ab51e257e894/disk-0.vmdk. {{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1027.833638] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Uploaded image b1c651f7-625a-4460-839a-a49c838332bb to the Glance image server {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1027.836291] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Destroying the VM {{(pid=61978) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1027.837209] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4126dcc1-88ce-4a03-89ee-f0ebfb87963a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.845212] env[61978]: DEBUG oslo_concurrency.lockutils [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.846952] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1027.846952] env[61978]: value = "task-1395143" [ 1027.846952] env[61978]: _type = "Task" [ 1027.846952] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.869174] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395143, 'name': Destroy_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.985323] env[61978]: DEBUG oslo_vmware.api [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395140, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.001551] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T15:05:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='85790f6c-8872-4acd-90a5-40fd0cc369d4',id=39,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1158956347',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1028.001963] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1028.002266] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1028.002583] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1028.002869] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1028.003152] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1028.003500] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1028.003776] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1028.004077] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Got 1 
possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1028.004372] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1028.004673] env[61978]: DEBUG nova.virt.hardware [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1028.013871] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Reconfiguring VM instance instance-0000002b to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1028.014802] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c726a46e-03d0-4790-9d89-9a1b108e5628 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.049922] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395141, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078859} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.055661] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1028.056318] env[61978]: DEBUG oslo_vmware.api [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 1028.056318] env[61978]: value = "task-1395144" [ 1028.056318] env[61978]: _type = "Task" [ 1028.056318] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.057206] env[61978]: DEBUG oslo_vmware.api [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395142, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.057934] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f348f5ae-32b2-4c21-8ca3-e8fe99f9719a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.085982] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] adf25af8-28c4-444e-b849-88d643f57dcf/adf25af8-28c4-444e-b849-88d643f57dcf.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1028.089312] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30c811d6-bc75-49b1-847e-ffbc3df76338 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.108822] env[61978]: DEBUG oslo_vmware.api [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395144, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.117609] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1028.117609] env[61978]: value = "task-1395145" [ 1028.117609] env[61978]: _type = "Task" [ 1028.117609] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.129846] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395145, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.238597] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc26e13a-52e7-47de-86c5-7f98c8d02b92 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "7e71c8de-1f94-4161-8ad8-a67792c5ce24" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.693s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.239155] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0f2d70c8-9310-46aa-931d-3c48b3e894d1 tempest-ImagesOneServerTestJSON-188626847 tempest-ImagesOneServerTestJSON-188626847-project-member] Lock "dd686727-fc33-4dc4-b386-aabec27cf215" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.703s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.302785] env[61978]: INFO nova.compute.manager [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Took 35.71 seconds to build instance. [ 1028.360272] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395143, 'name': Destroy_Task} progress is 33%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.450049] env[61978]: DEBUG nova.compute.manager [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1028.450949] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be4bb2d-2d86-48fe-b0ca-fc7099c9f27f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.487134] env[61978]: DEBUG oslo_vmware.api [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395140, 'name': CreateSnapshot_Task, 'duration_secs': 1.127842} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.488144] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Created Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1028.489021] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352b33ed-2944-426e-bb35-26bbd754c0c2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.548954] env[61978]: DEBUG oslo_vmware.api [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395142, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.572559] env[61978]: DEBUG oslo_vmware.api [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395144, 'name': ReconfigVM_Task, 'duration_secs': 0.296528} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.572844] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Reconfigured VM instance instance-0000002b to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1028.573789] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b158c6c-589b-4018-9375-e386f67aec58 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.600720] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] f930ab49-c215-4b2e-92b1-21c0d52a70eb/f930ab49-c215-4b2e-92b1-21c0d52a70eb.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1028.601068] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24d26468-0a5e-4741-a2f5-d30133d35b13 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.622659] env[61978]: DEBUG oslo_vmware.api [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 1028.622659] env[61978]: value = "task-1395146" [ 1028.622659] env[61978]: _type = "Task" [ 1028.622659] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.629851] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395145, 'name': ReconfigVM_Task, 'duration_secs': 0.508852} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.630536] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Reconfigured VM instance instance-00000036 to attach disk [datastore2] adf25af8-28c4-444e-b849-88d643f57dcf/adf25af8-28c4-444e-b849-88d643f57dcf.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1028.631235] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-01f57eb4-7565-4a64-8fa6-45443c5a4c97 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.636579] env[61978]: DEBUG oslo_vmware.api [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395146, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.640391] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1028.640391] env[61978]: value = "task-1395147" [ 1028.640391] env[61978]: _type = "Task" [ 1028.640391] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.655631] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395147, 'name': Rename_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.658482] env[61978]: DEBUG nova.compute.manager [req-89eab72a-26e8-48e4-81d6-ab7362e2539c req-77edf20d-1acc-43eb-80ca-1e77e03855b5 service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Received event network-vif-plugged-0ac08d0f-ea95-4b4e-9fad-6e6e819ec94d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1028.658677] env[61978]: DEBUG oslo_concurrency.lockutils [req-89eab72a-26e8-48e4-81d6-ab7362e2539c req-77edf20d-1acc-43eb-80ca-1e77e03855b5 service nova] Acquiring lock "5d9556d2-fcdd-416f-8f16-0fb271ff4ca5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.658969] env[61978]: DEBUG oslo_concurrency.lockutils [req-89eab72a-26e8-48e4-81d6-ab7362e2539c req-77edf20d-1acc-43eb-80ca-1e77e03855b5 service nova] Lock "5d9556d2-fcdd-416f-8f16-0fb271ff4ca5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.659241] env[61978]: DEBUG oslo_concurrency.lockutils [req-89eab72a-26e8-48e4-81d6-ab7362e2539c req-77edf20d-1acc-43eb-80ca-1e77e03855b5 service nova] Lock "5d9556d2-fcdd-416f-8f16-0fb271ff4ca5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.659475] env[61978]: DEBUG nova.compute.manager [req-89eab72a-26e8-48e4-81d6-ab7362e2539c req-77edf20d-1acc-43eb-80ca-1e77e03855b5 service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] No waiting events found dispatching network-vif-plugged-0ac08d0f-ea95-4b4e-9fad-6e6e819ec94d {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1028.659732] env[61978]: WARNING nova.compute.manager [req-89eab72a-26e8-48e4-81d6-ab7362e2539c req-77edf20d-1acc-43eb-80ca-1e77e03855b5 service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Received unexpected event network-vif-plugged-0ac08d0f-ea95-4b4e-9fad-6e6e819ec94d for instance with vm_state building and task_state spawning. [ 1028.659965] env[61978]: DEBUG nova.compute.manager [req-89eab72a-26e8-48e4-81d6-ab7362e2539c req-77edf20d-1acc-43eb-80ca-1e77e03855b5 service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Received event network-changed-0ac08d0f-ea95-4b4e-9fad-6e6e819ec94d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1028.660212] env[61978]: DEBUG nova.compute.manager [req-89eab72a-26e8-48e4-81d6-ab7362e2539c req-77edf20d-1acc-43eb-80ca-1e77e03855b5 service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Refreshing instance network info cache due to event network-changed-0ac08d0f-ea95-4b4e-9fad-6e6e819ec94d. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1028.660533] env[61978]: DEBUG oslo_concurrency.lockutils [req-89eab72a-26e8-48e4-81d6-ab7362e2539c req-77edf20d-1acc-43eb-80ca-1e77e03855b5 service nova] Acquiring lock "refresh_cache-5d9556d2-fcdd-416f-8f16-0fb271ff4ca5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1028.660736] env[61978]: DEBUG oslo_concurrency.lockutils [req-89eab72a-26e8-48e4-81d6-ab7362e2539c req-77edf20d-1acc-43eb-80ca-1e77e03855b5 service nova] Acquired lock "refresh_cache-5d9556d2-fcdd-416f-8f16-0fb271ff4ca5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.660936] env[61978]: DEBUG nova.network.neutron [req-89eab72a-26e8-48e4-81d6-ab7362e2539c req-77edf20d-1acc-43eb-80ca-1e77e03855b5 service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Refreshing network info cache for port 0ac08d0f-ea95-4b4e-9fad-6e6e819ec94d {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1028.804910] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.807s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.831996] env[61978]: DEBUG oslo_concurrency.lockutils [None req-194bc99f-77f3-4f0b-95bf-37a2a98613ad tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "7e71c8de-1f94-4161-8ad8-a67792c5ce24" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.832290] env[61978]: DEBUG oslo_concurrency.lockutils [None req-194bc99f-77f3-4f0b-95bf-37a2a98613ad tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "7e71c8de-1f94-4161-8ad8-a67792c5ce24" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.832490] env[61978]: DEBUG nova.compute.manager [None req-194bc99f-77f3-4f0b-95bf-37a2a98613ad tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1028.833537] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a7e83d-b6b8-4585-bb04-255f5663b44b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.844114] env[61978]: DEBUG nova.compute.manager [None req-194bc99f-77f3-4f0b-95bf-37a2a98613ad tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61978) do_stop_instance 
/opt/stack/nova/nova/compute/manager.py:3368}} [ 1028.844753] env[61978]: DEBUG nova.objects.instance [None req-194bc99f-77f3-4f0b-95bf-37a2a98613ad tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lazy-loading 'flavor' on Instance uuid 7e71c8de-1f94-4161-8ad8-a67792c5ce24 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1028.860459] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395143, 'name': Destroy_Task, 'duration_secs': 0.681639} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.860829] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Destroyed the VM [ 1028.860962] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Deleting Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1028.861901] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-58e8d78f-724d-41d0-b9fe-a190c794f31c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.873768] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1028.873768] env[61978]: value = "task-1395148" [ 1028.873768] env[61978]: _type = "Task" [ 1028.873768] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.883094] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395148, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.963287] env[61978]: INFO nova.compute.manager [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] instance snapshotting [ 1028.963465] env[61978]: WARNING nova.compute.manager [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 1028.966692] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02318afe-78fc-443f-9820-9742987f14c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.993667] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191a7ac1-606b-4c00-9ff4-fcbdf0417566 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.010426] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Creating linked-clone VM from snapshot {{(pid=61978) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1029.015453] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ba675d0a-cfcc-45fc-9eb1-9b7b0c660843 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.026664] env[61978]: DEBUG oslo_vmware.api [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1029.026664] env[61978]: value = "task-1395149" [ 1029.026664] env[61978]: _type = "Task" [ 1029.026664] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.043709] env[61978]: DEBUG oslo_vmware.api [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395149, 'name': CloneVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.053164] env[61978]: DEBUG oslo_vmware.api [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395142, 'name': CloneVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.140081] env[61978]: DEBUG oslo_vmware.api [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395146, 'name': ReconfigVM_Task, 'duration_secs': 0.483113} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.140081] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Reconfigured VM instance instance-0000002b to attach disk [datastore2] f930ab49-c215-4b2e-92b1-21c0d52a70eb/f930ab49-c215-4b2e-92b1-21c0d52a70eb.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1029.140081] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Updating instance 'f930ab49-c215-4b2e-92b1-21c0d52a70eb' progress to 50 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1029.160943] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395147, 'name': Rename_Task, 'duration_secs': 0.208374} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.164213] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1029.166547] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b37a5433-f5b6-449b-9633-e646be96f42b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.176577] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1029.176577] env[61978]: value = "task-1395150" [ 1029.176577] env[61978]: _type = "Task" [ 1029.176577] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.193369] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395150, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.214093] env[61978]: DEBUG nova.network.neutron [req-89eab72a-26e8-48e4-81d6-ab7362e2539c req-77edf20d-1acc-43eb-80ca-1e77e03855b5 service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1029.256896] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a9429a-0b39-429a-a784-c19ee11e693a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.266113] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f93219e-8ae2-4e8e-b88e-826f933c9a64 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.307513] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b97259d4-bafe-4c49-845a-ff323eda1c2f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.319026] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7575ffa6-0895-4e35-8942-3fafeb419c5f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.335799] env[61978]: DEBUG nova.compute.provider_tree [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.349670] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-194bc99f-77f3-4f0b-95bf-37a2a98613ad tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1029.350066] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6cc2e65c-f0e3-41ee-b069-56a2013413e7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.354785] env[61978]: DEBUG nova.network.neutron [req-89eab72a-26e8-48e4-81d6-ab7362e2539c req-77edf20d-1acc-43eb-80ca-1e77e03855b5 service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.359442] env[61978]: DEBUG oslo_vmware.api [None req-194bc99f-77f3-4f0b-95bf-37a2a98613ad tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1029.359442] env[61978]: value = "task-1395151" [ 1029.359442] env[61978]: _type = "Task" [ 1029.359442] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.369655] env[61978]: DEBUG oslo_vmware.api [None req-194bc99f-77f3-4f0b-95bf-37a2a98613ad tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395151, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.384188] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395148, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.514067] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Creating Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1029.514470] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d7d9875c-002c-484d-81e1-82519c7a779e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.527024] env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1029.527024] env[61978]: value = "task-1395152" [ 1029.527024] env[61978]: _type = "Task" [ 1029.527024] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.547576] env[61978]: DEBUG oslo_vmware.api [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395149, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.547908] env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395152, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.556030] env[61978]: DEBUG oslo_vmware.api [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395142, 'name': CloneVM_Task, 'duration_secs': 1.543317} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.556030] env[61978]: INFO nova.virt.vmwareapi.vmops [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Created linked-clone VM from snapshot [ 1029.556673] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61dd956-39df-48af-8ca3-c8786f2dcaf4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.567096] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Uploading image e94fc8cc-cba8-45f1-a46f-dfb130fa6e5a {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1029.594675] env[61978]: DEBUG oslo_vmware.rw_handles [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1029.594675] env[61978]: value = "vm-295918" [ 1029.594675] env[61978]: _type = "VirtualMachine" [ 1029.594675] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1029.595660] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-16fc0dc8-ce6d-477f-bbc7-5d1577fd3d2b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.609033] env[61978]: DEBUG oslo_vmware.rw_handles [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lease: (returnval){ [ 1029.609033] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]527d8d65-f2a5-368a-8c80-59833828cb6b" [ 1029.609033] env[61978]: _type = "HttpNfcLease" [ 1029.609033] env[61978]: } obtained for exporting VM: (result){ [ 1029.609033] env[61978]: value = "vm-295918" [ 1029.609033] env[61978]: _type = "VirtualMachine" [ 1029.609033] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1029.609679] env[61978]: DEBUG oslo_vmware.api [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the lease: (returnval){ [ 1029.609679] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]527d8d65-f2a5-368a-8c80-59833828cb6b" [ 1029.609679] env[61978]: _type = "HttpNfcLease" [ 1029.609679] env[61978]: } to be ready. {{(pid=61978) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1029.622025] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1029.622025] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]527d8d65-f2a5-368a-8c80-59833828cb6b" [ 1029.622025] env[61978]: _type = "HttpNfcLease" [ 1029.622025] env[61978]: } is ready. 
{{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1029.622025] env[61978]: DEBUG oslo_vmware.rw_handles [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1029.622025] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]527d8d65-f2a5-368a-8c80-59833828cb6b" [ 1029.622025] env[61978]: _type = "HttpNfcLease" [ 1029.622025] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1029.626021] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acca3799-96f2-466a-8fc1-498dcea9de32 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.639428] env[61978]: DEBUG oslo_vmware.rw_handles [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fb09cd-004b-e7f9-a9ff-547a5df63e34/disk-0.vmdk from lease info. {{(pid=61978) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1029.639978] env[61978]: DEBUG oslo_vmware.rw_handles [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fb09cd-004b-e7f9-a9ff-547a5df63e34/disk-0.vmdk for reading. {{(pid=61978) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1029.709173] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bcae34d-f208-46c7-867a-70603a8fb465 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.733340] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395150, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.749640] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17cdcaf1-4da9-4229-ba0f-8a0475e57e71 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.773408] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Updating instance 'f930ab49-c215-4b2e-92b1-21c0d52a70eb' progress to 67 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1029.780560] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2c9ebf7e-61cc-442b-8093-c77e10a3ba47 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.840442] env[61978]: DEBUG nova.scheduler.client.report [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1029.859611] env[61978]: DEBUG oslo_concurrency.lockutils [req-89eab72a-26e8-48e4-81d6-ab7362e2539c req-77edf20d-1acc-43eb-80ca-1e77e03855b5 service nova] Releasing lock "refresh_cache-5d9556d2-fcdd-416f-8f16-0fb271ff4ca5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.871210] env[61978]: DEBUG oslo_vmware.api [None req-194bc99f-77f3-4f0b-95bf-37a2a98613ad tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395151, 'name': PowerOffVM_Task, 'duration_secs': 0.18142} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.871531] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-194bc99f-77f3-4f0b-95bf-37a2a98613ad tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1029.871739] env[61978]: DEBUG nova.compute.manager [None req-194bc99f-77f3-4f0b-95bf-37a2a98613ad tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1029.872598] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d91d60a-ea83-4b85-8659-fae6fa507042 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.888029] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395148, 'name': RemoveSnapshot_Task, 'duration_secs': 0.577499} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.889124] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Deleted Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1029.889480] env[61978]: DEBUG nova.compute.manager [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1029.890327] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b45cd9-b3c3-4e50-9815-59fe4ca708f1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.037654] env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395152, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.046290] env[61978]: DEBUG oslo_vmware.api [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395149, 'name': CloneVM_Task} progress is 95%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.219187] env[61978]: DEBUG nova.network.neutron [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Successfully updated port: 8664e01e-1422-4709-85a0-c3684ca5733c {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1030.226883] env[61978]: DEBUG oslo_vmware.api [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395150, 'name': PowerOnVM_Task, 'duration_secs': 0.566048} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.227538] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1030.227728] env[61978]: INFO nova.compute.manager [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Took 9.19 seconds to spawn the instance on the hypervisor. [ 1030.227951] env[61978]: DEBUG nova.compute.manager [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1030.229252] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37cbdee-bc56-4ddf-b643-f46627286ac0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.335462] env[61978]: DEBUG nova.network.neutron [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Port fcd64700-31ef-4310-8986-b22e515b1c55 binding to destination host cpu-1 is already ACTIVE {{(pid=61978) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1030.347959] env[61978]: DEBUG oslo_concurrency.lockutils [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.648s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.349269] env[61978]: DEBUG nova.compute.manager [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1030.354138] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.383s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.354635] env[61978]: DEBUG nova.objects.instance [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Lazy-loading 'resources' on Instance uuid 2c1ce021-255f-454d-ba0e-c85380f3e973 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.392439] env[61978]: DEBUG oslo_concurrency.lockutils [None req-194bc99f-77f3-4f0b-95bf-37a2a98613ad tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "7e71c8de-1f94-4161-8ad8-a67792c5ce24" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.560s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.404498] env[61978]: INFO nova.compute.manager [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Shelve offloading [ 1030.406010] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1030.406786] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e199b686-d2bc-4a68-9dab-fe20f090b4bd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.418136] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1030.418136] env[61978]: value = "task-1395154" [ 1030.418136] env[61978]: _type = "Task" [ 1030.418136] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.432038] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] VM already powered off {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1030.432828] env[61978]: DEBUG nova.compute.manager [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1030.434180] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91245cc-6a92-4ce7-818c-57d9ce6e553e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.442851] env[61978]: DEBUG oslo_concurrency.lockutils [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "refresh_cache-8f609401-af09-4291-a1e7-a356fbc4aac9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.443256] env[61978]: DEBUG oslo_concurrency.lockutils [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "refresh_cache-8f609401-af09-4291-a1e7-a356fbc4aac9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.443682] env[61978]: DEBUG nova.network.neutron [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1030.538855] env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395152, 'name': CreateSnapshot_Task, 'duration_secs': 0.554709} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.539951] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Created Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1030.540776] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5326af93-7b02-4b33-9419-ab5da0b2c84e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.548053] env[61978]: DEBUG oslo_vmware.api [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395149, 'name': CloneVM_Task, 'duration_secs': 1.217201} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.549081] env[61978]: INFO nova.virt.vmwareapi.vmops [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Created linked-clone VM from snapshot [ 1030.550350] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ca916db-34d5-4048-aa3a-5ac5811c7db4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.568317] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Uploading image fefd40fb-471f-4299-8d1e-84e9dcba19c2 {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1030.605994] env[61978]: DEBUG oslo_vmware.rw_handles [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1030.605994] env[61978]: value = "vm-295920" [ 1030.605994] env[61978]: _type = "VirtualMachine" [ 1030.605994] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1030.607226] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2ded191e-abe9-46f5-b0fb-bb07741f7e60 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.618744] env[61978]: DEBUG oslo_vmware.rw_handles [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lease: (returnval){ [ 1030.618744] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5226f33a-4afa-b4fd-50cd-d153aa37bc82" [ 1030.618744] env[61978]: _type = "HttpNfcLease" [ 1030.618744] env[61978]: } obtained for exporting VM: (result){ [ 1030.618744] env[61978]: value = "vm-295920" [ 1030.618744] env[61978]: _type = "VirtualMachine" [ 1030.618744] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1030.618999] env[61978]: DEBUG oslo_vmware.api [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the lease: (returnval){ [ 1030.618999] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5226f33a-4afa-b4fd-50cd-d153aa37bc82" [ 1030.618999] env[61978]: _type = "HttpNfcLease" [ 1030.618999] env[61978]: } to be ready. {{(pid=61978) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1030.628795] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1030.628795] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5226f33a-4afa-b4fd-50cd-d153aa37bc82" [ 1030.628795] env[61978]: _type = "HttpNfcLease" [ 1030.628795] env[61978]: } is initializing. 
{{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1030.750777] env[61978]: INFO nova.compute.manager [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Took 35.37 seconds to build instance. [ 1030.824029] env[61978]: DEBUG nova.compute.manager [req-69cf5119-7d65-438a-9ebb-c9b6ff95fe4a req-ff4fdc92-0240-4aca-bbbd-d629b774682c service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Received event network-vif-plugged-8664e01e-1422-4709-85a0-c3684ca5733c {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1030.824297] env[61978]: DEBUG oslo_concurrency.lockutils [req-69cf5119-7d65-438a-9ebb-c9b6ff95fe4a req-ff4fdc92-0240-4aca-bbbd-d629b774682c service nova] Acquiring lock "5d9556d2-fcdd-416f-8f16-0fb271ff4ca5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.824523] env[61978]: DEBUG oslo_concurrency.lockutils [req-69cf5119-7d65-438a-9ebb-c9b6ff95fe4a req-ff4fdc92-0240-4aca-bbbd-d629b774682c service nova] Lock "5d9556d2-fcdd-416f-8f16-0fb271ff4ca5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.824718] env[61978]: DEBUG oslo_concurrency.lockutils [req-69cf5119-7d65-438a-9ebb-c9b6ff95fe4a req-ff4fdc92-0240-4aca-bbbd-d629b774682c service nova] Lock "5d9556d2-fcdd-416f-8f16-0fb271ff4ca5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.824878] env[61978]: DEBUG nova.compute.manager [req-69cf5119-7d65-438a-9ebb-c9b6ff95fe4a req-ff4fdc92-0240-4aca-bbbd-d629b774682c service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] No waiting events found dispatching network-vif-plugged-8664e01e-1422-4709-85a0-c3684ca5733c {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1030.825091] env[61978]: WARNING nova.compute.manager [req-69cf5119-7d65-438a-9ebb-c9b6ff95fe4a req-ff4fdc92-0240-4aca-bbbd-d629b774682c service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Received unexpected event network-vif-plugged-8664e01e-1422-4709-85a0-c3684ca5733c for instance with vm_state building and task_state spawning. [ 1030.825342] env[61978]: DEBUG nova.compute.manager [req-69cf5119-7d65-438a-9ebb-c9b6ff95fe4a req-ff4fdc92-0240-4aca-bbbd-d629b774682c service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Received event network-changed-8664e01e-1422-4709-85a0-c3684ca5733c {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1030.825484] env[61978]: DEBUG nova.compute.manager [req-69cf5119-7d65-438a-9ebb-c9b6ff95fe4a req-ff4fdc92-0240-4aca-bbbd-d629b774682c service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Refreshing instance network info cache due to event network-changed-8664e01e-1422-4709-85a0-c3684ca5733c. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1030.825731] env[61978]: DEBUG oslo_concurrency.lockutils [req-69cf5119-7d65-438a-9ebb-c9b6ff95fe4a req-ff4fdc92-0240-4aca-bbbd-d629b774682c service nova] Acquiring lock "refresh_cache-5d9556d2-fcdd-416f-8f16-0fb271ff4ca5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.825792] env[61978]: DEBUG oslo_concurrency.lockutils [req-69cf5119-7d65-438a-9ebb-c9b6ff95fe4a req-ff4fdc92-0240-4aca-bbbd-d629b774682c service nova] Acquired lock "refresh_cache-5d9556d2-fcdd-416f-8f16-0fb271ff4ca5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.825949] env[61978]: DEBUG nova.network.neutron [req-69cf5119-7d65-438a-9ebb-c9b6ff95fe4a req-ff4fdc92-0240-4aca-bbbd-d629b774682c service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Refreshing network info cache for port 8664e01e-1422-4709-85a0-c3684ca5733c {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1030.857735] env[61978]: DEBUG nova.compute.utils [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1030.859158] env[61978]: DEBUG nova.compute.manager [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1030.859337] env[61978]: DEBUG nova.network.neutron [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1030.947539] env[61978]: DEBUG nova.policy [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '354d8a810de04cda9ef973275109aa2c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a3471435d4747648cd8ddf0817d9b85', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1031.072216] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Creating linked-clone VM from snapshot {{(pid=61978) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1031.076266] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e5cc1bc6-2709-4110-8d87-97313eb10552 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.091927] 
env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1031.091927] env[61978]: value = "task-1395156" [ 1031.091927] env[61978]: _type = "Task" [ 1031.091927] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.113318] env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395156, 'name': CloneVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.134261] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1031.134261] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5226f33a-4afa-b4fd-50cd-d153aa37bc82" [ 1031.134261] env[61978]: _type = "HttpNfcLease" [ 1031.134261] env[61978]: } is ready. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1031.134261] env[61978]: DEBUG oslo_vmware.rw_handles [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1031.134261] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5226f33a-4afa-b4fd-50cd-d153aa37bc82" [ 1031.134261] env[61978]: _type = "HttpNfcLease" [ 1031.134261] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1031.138107] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ec6b7e-dd64-4bbe-b8db-daca94ef80c0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.152468] env[61978]: DEBUG oslo_vmware.rw_handles [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ad4bd2-2436-bc03-6fc9-eea4a9f92281/disk-0.vmdk from lease info. {{(pid=61978) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1031.153065] env[61978]: DEBUG oslo_vmware.rw_handles [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ad4bd2-2436-bc03-6fc9-eea4a9f92281/disk-0.vmdk for reading. 
{{(pid=61978) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1031.235737] env[61978]: DEBUG oslo_concurrency.lockutils [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.236374] env[61978]: DEBUG oslo_concurrency.lockutils [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.236742] env[61978]: DEBUG oslo_concurrency.lockutils [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.236939] env[61978]: DEBUG oslo_concurrency.lockutils [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.237220] env[61978]: DEBUG oslo_concurrency.lockutils [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.239456] env[61978]: INFO nova.compute.manager [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Terminating instance [ 1031.250304] env[61978]: DEBUG nova.compute.manager [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1031.250637] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1031.255280] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7e4dc2-73eb-47b4-b8db-febb6e9520bc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.260739] env[61978]: DEBUG oslo_concurrency.lockutils [None req-996da2ac-b70b-4aab-bddf-0b3c8acab893 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "adf25af8-28c4-444e-b849-88d643f57dcf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.195s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.268140] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1031.273020] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9df6d4d8-0efa-4098-9c7c-3ea9a25c8f60 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.284602] env[61978]: DEBUG oslo_vmware.api [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1031.284602] env[61978]: value = "task-1395157" [ 1031.284602] env[61978]: _type = "Task" [ 1031.284602] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.300785] env[61978]: DEBUG oslo_vmware.api [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395157, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.305348] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6c3bbaf1-2f04-41cb-94eb-f835454edea8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.372638] env[61978]: DEBUG nova.compute.manager [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1031.381645] env[61978]: DEBUG oslo_concurrency.lockutils [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "f930ab49-c215-4b2e-92b1-21c0d52a70eb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.381895] env[61978]: DEBUG oslo_concurrency.lockutils [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "f930ab49-c215-4b2e-92b1-21c0d52a70eb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.382652] env[61978]: DEBUG oslo_concurrency.lockutils [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "f930ab49-c215-4b2e-92b1-21c0d52a70eb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.415206] env[61978]: DEBUG nova.network.neutron [req-69cf5119-7d65-438a-9ebb-c9b6ff95fe4a req-ff4fdc92-0240-4aca-bbbd-d629b774682c service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1031.455093] env[61978]: DEBUG nova.network.neutron [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Updating instance_info_cache with network_info: [{"id": "1682c3e8-c35b-4055-90d6-a236d4439ee1", "address": "fa:16:3e:ba:a2:f0", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1682c3e8-c3", "ovs_interfaceid": "1682c3e8-c35b-4055-90d6-a236d4439ee1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.536380] env[61978]: DEBUG nova.network.neutron [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] 
[instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Successfully created port: c0a88faf-13e7-4c53-bd83-b5d1060f8d5b {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1031.604278] env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395156, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.624549] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91dd34b6-25ee-493f-84b6-95be5d7810c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.633950] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b983e8-13a0-434e-86a8-9a5c5ed6dadb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.672818] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd22cba-bd5d-4cba-a4de-abdbb8dcc244 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.681397] env[61978]: DEBUG nova.network.neutron [req-69cf5119-7d65-438a-9ebb-c9b6ff95fe4a req-ff4fdc92-0240-4aca-bbbd-d629b774682c service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.686025] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db12f43b-3503-41cf-b4c8-21fa29790576 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.703592] env[61978]: DEBUG nova.compute.provider_tree [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.750678] env[61978]: INFO nova.compute.manager [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Rebuilding instance [ 1031.761675] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "adf25af8-28c4-444e-b849-88d643f57dcf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.762054] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "adf25af8-28c4-444e-b849-88d643f57dcf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.762356] 
env[61978]: DEBUG oslo_concurrency.lockutils [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "adf25af8-28c4-444e-b849-88d643f57dcf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.763300] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "adf25af8-28c4-444e-b849-88d643f57dcf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.763901] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "adf25af8-28c4-444e-b849-88d643f57dcf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.768563] env[61978]: INFO nova.compute.manager [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Terminating instance [ 1031.770873] env[61978]: DEBUG nova.compute.manager [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1031.771236] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1031.772090] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4bc1a3-45ac-4515-aa20-2fb587ed470a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.783529] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1031.783837] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cdcb1576-abd9-49ba-84eb-ee88e7c36ce5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.797137] env[61978]: DEBUG oslo_vmware.api [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395157, 'name': PowerOffVM_Task, 'duration_secs': 0.286504} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.798953] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1031.799373] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1031.799814] env[61978]: DEBUG oslo_vmware.api [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1031.799814] env[61978]: value = "task-1395158" [ 1031.799814] env[61978]: _type = "Task" [ 1031.799814] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.802357] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a745ec38-cc3d-4e41-882d-b01b5fefeef7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.815106] env[61978]: DEBUG oslo_vmware.api [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395158, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.961124] env[61978]: DEBUG oslo_concurrency.lockutils [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "refresh_cache-8f609401-af09-4291-a1e7-a356fbc4aac9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.106396] env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395156, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.189704] env[61978]: DEBUG oslo_concurrency.lockutils [req-69cf5119-7d65-438a-9ebb-c9b6ff95fe4a req-ff4fdc92-0240-4aca-bbbd-d629b774682c service nova] Releasing lock "refresh_cache-5d9556d2-fcdd-416f-8f16-0fb271ff4ca5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.207312] env[61978]: DEBUG nova.scheduler.client.report [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1032.316613] env[61978]: DEBUG oslo_vmware.api [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395158, 'name': PowerOffVM_Task, 'duration_secs': 0.226757} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.317221] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1032.317443] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1032.318236] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bfcee5a9-5b57-4b52-ac98-fdb4b18f0a79 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.398578] env[61978]: DEBUG nova.compute.manager [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1032.442201] env[61978]: DEBUG oslo_concurrency.lockutils [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "refresh_cache-f930ab49-c215-4b2e-92b1-21c0d52a70eb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.442425] env[61978]: DEBUG oslo_concurrency.lockutils [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquired lock "refresh_cache-f930ab49-c215-4b2e-92b1-21c0d52a70eb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.442578] env[61978]: DEBUG nova.network.neutron [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1032.564608] env[61978]: DEBUG nova.network.neutron [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Successfully updated port: 30edec9f-85c0-41f9-ab16-aea72cc18c06 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1032.566387] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1032.567873] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9166bc-af5c-43d8-827e-9e4fa0e63862 {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.577669] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1032.581181] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d516f706-a671-4c4f-a514-5c8000a19243 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.605417] env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395156, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.714306] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.359s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.716831] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.513s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.718348] env[61978]: DEBUG nova.objects.instance [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lazy-loading 'resources' on Instance uuid 8a21e6a7-c34e-4af0-b1fd-8a501694614c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1032.745587] env[61978]: INFO nova.scheduler.client.report [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Deleted allocations for instance 2c1ce021-255f-454d-ba0e-c85380f3e973 [ 1032.760642] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1032.760642] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1032.760642] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Deleting the datastore file [datastore2] dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56 
{{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1032.761019] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3786d7b6-3161-4fac-bdb2-707eabdf68cd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.763638] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1032.763894] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1032.764153] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Deleting the datastore file [datastore2] adf25af8-28c4-444e-b849-88d643f57dcf {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1032.764961] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6af46a29-2e6c-4333-9bf9-2a5131d8d386 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.770643] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1032.770975] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1032.771242] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Deleting the datastore file [datastore2] 8f609401-af09-4291-a1e7-a356fbc4aac9 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1032.771638] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e062a066-2f37-4d6e-a935-1f4442c4210f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.777688] env[61978]: DEBUG oslo_vmware.api [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1032.777688] env[61978]: value = "task-1395162" [ 1032.777688] env[61978]: _type = "Task" [ 1032.777688] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.778216] env[61978]: DEBUG oslo_vmware.api [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1032.778216] env[61978]: value = "task-1395163" [ 1032.778216] env[61978]: _type = "Task" [ 1032.778216] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.786963] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1032.786963] env[61978]: value = "task-1395164" [ 1032.786963] env[61978]: _type = "Task" [ 1032.786963] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.799065] env[61978]: DEBUG oslo_vmware.api [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395163, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.799869] env[61978]: DEBUG oslo_vmware.api [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395162, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.806622] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395164, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.067643] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquiring lock "refresh_cache-5d9556d2-fcdd-416f-8f16-0fb271ff4ca5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1033.067643] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquired lock "refresh_cache-5d9556d2-fcdd-416f-8f16-0fb271ff4ca5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.067643] env[61978]: DEBUG nova.network.neutron [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1033.105078] env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395156, 'name': CloneVM_Task} progress is 95%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.239626] env[61978]: DEBUG nova.network.neutron [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Successfully updated port: c0a88faf-13e7-4c53-bd83-b5d1060f8d5b {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1033.263711] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6d45377a-e1e3-4d22-aa6d-9baae299fe01 tempest-ServerGroupTestJSON-1227790595 tempest-ServerGroupTestJSON-1227790595-project-member] Lock "2c1ce021-255f-454d-ba0e-c85380f3e973" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.467s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.304992] env[61978]: DEBUG oslo_vmware.api [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395163, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.305439] env[61978]: DEBUG oslo_vmware.api [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395162, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.315081] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395164, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.496482] env[61978]: DEBUG nova.network.neutron [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Updating instance_info_cache with network_info: [{"id": "fcd64700-31ef-4310-8986-b22e515b1c55", "address": "fa:16:3e:a7:22:5a", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd64700-31", "ovs_interfaceid": "fcd64700-31ef-4310-8986-b22e515b1c55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.606070] env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395156, 'name': CloneVM_Task} progress is 95%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.611226] env[61978]: DEBUG nova.network.neutron [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1033.717699] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ff3bb9-0db4-4a41-9c6a-3fce1546744f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.731362] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e888d12b-5d7c-4db9-900b-fe34fd7f7853 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.767518] env[61978]: DEBUG oslo_concurrency.lockutils [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquiring lock "refresh_cache-3ee1023c-7837-4db0-88d4-f88c9a43fba3" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1033.767651] env[61978]: DEBUG oslo_concurrency.lockutils [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquired lock "refresh_cache-3ee1023c-7837-4db0-88d4-f88c9a43fba3" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.767804] env[61978]: DEBUG nova.network.neutron [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1033.772554] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b538ed42-3b39-4b66-a01e-02f22454d980 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.790410] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd43704-af57-4b8d-8577-e328ab9f3276 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.813863] env[61978]: DEBUG oslo_vmware.api [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395163, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.587419} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.814325] env[61978]: DEBUG oslo_vmware.api [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395162, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.585452} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.824448] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1033.824780] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1033.825076] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1033.825376] env[61978]: INFO nova.compute.manager [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Took 2.05 seconds to destroy the instance on the hypervisor. [ 1033.825591] env[61978]: DEBUG oslo.service.loopingcall [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1033.825873] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1033.826132] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1033.826397] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1033.826640] env[61978]: INFO nova.compute.manager [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Took 2.58 seconds to destroy the instance on the hypervisor. 
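The records above trace the hypervisor-side teardown for instances dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56 and adf25af8-28c4-444e-b849-88d643f57dcf: a PowerOffVM_Task is issued and polled to completion, the VM is unregistered, and a DeleteDatastoreFile_Task removes the instance directory from datastore2, with oslo.vmware's wait_for_task loop producing the "progress is 0%" / "completed successfully" lines. The sketch below only mirrors that polling pattern under stated assumptions; FakeSession, FakeTask, destroy_instance and the sample arguments are hypothetical stand-ins, not the real oslo_vmware.api.VMwareAPISession or Nova's vmops code.

```python
# Illustrative sketch of the poll-until-done teardown sequence seen in the
# log (PowerOffVM_Task -> UnregisterVM -> DeleteDatastoreFile_Task).
# All names here are assumptions for illustration only.
import time


class FakeTask:
    """Stand-in for a vCenter task handle returned by the SOAP API."""

    def __init__(self, name, duration=0.2):
        self.name = name
        self._deadline = time.monotonic() + duration

    @property
    def done(self):
        return time.monotonic() >= self._deadline


class FakeSession:
    """Stand-in for the vCenter session object (assumed interface)."""

    def invoke_api(self, method, **kwargs):
        # Mirrors the "Invoking <method> with opID=..." DEBUG records.
        print(f"Invoking {method}")
        return FakeTask(method)

    def wait_for_task(self, task, poll_interval=0.05):
        # Mimics the task polling that emits "progress is 0%" followed by
        # "completed successfully" in the log above.
        while not task.done:
            print(f"Task {task.name} still running")
            time.sleep(poll_interval)
        print(f"Task {task.name} completed successfully")


def destroy_instance(session, vm_ref, datastore_path):
    """Replay the teardown order recorded in the log."""
    session.wait_for_task(session.invoke_api("PowerOffVM_Task", vm=vm_ref))
    session.invoke_api("UnregisterVM", vm=vm_ref)  # no task to await
    session.wait_for_task(
        session.invoke_api("DeleteDatastoreFile_Task", name=datastore_path))


if __name__ == "__main__":
    destroy_instance(FakeSession(), "vm-123",
                     "[datastore2] dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56")
```

Once the datastore delete completes, the compute manager moves on to network deallocation, which is what the subsequent "Deallocating network for instance" records show.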
[ 1033.826936] env[61978]: DEBUG oslo.service.loopingcall [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1033.827571] env[61978]: DEBUG nova.compute.provider_tree [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1033.834798] env[61978]: DEBUG nova.compute.manager [-] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1033.835041] env[61978]: DEBUG nova.network.neutron [-] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1033.836738] env[61978]: DEBUG nova.compute.manager [-] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1033.836871] env[61978]: DEBUG nova.network.neutron [-] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1033.839163] env[61978]: DEBUG oslo_vmware.api [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395164, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.579076} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.840172] env[61978]: DEBUG nova.scheduler.client.report [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1033.843591] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1033.843809] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1033.844018] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1033.868322] env[61978]: INFO nova.scheduler.client.report [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Deleted allocations for instance 8f609401-af09-4291-a1e7-a356fbc4aac9 [ 1033.998596] env[61978]: DEBUG oslo_concurrency.lockutils [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Releasing lock "refresh_cache-f930ab49-c215-4b2e-92b1-21c0d52a70eb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.105800] env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395156, 'name': CloneVM_Task} progress is 95%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.124762] env[61978]: DEBUG nova.network.neutron [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Updating instance_info_cache with network_info: [{"id": "0ac08d0f-ea95-4b4e-9fad-6e6e819ec94d", "address": "fa:16:3e:85:8e:47", "network": {"id": "7eec4577-9cb8-47c3-89d5-035fe7edc036", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-206202374", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.220", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26cb7552530047c5867347d62195121e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ac08d0f-ea", "ovs_interfaceid": "0ac08d0f-ea95-4b4e-9fad-6e6e819ec94d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8664e01e-1422-4709-85a0-c3684ca5733c", "address": "fa:16:3e:c5:e9:39", "network": {"id": "228d9811-3a39-4966-b633-05f07ba74cec", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1088133781", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "26cb7552530047c5867347d62195121e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8664e01e-14", "ovs_interfaceid": "8664e01e-1422-4709-85a0-c3684ca5733c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "30edec9f-85c0-41f9-ab16-aea72cc18c06", "address": "fa:16:3e:f8:50:b7", "network": {"id": "7eec4577-9cb8-47c3-89d5-035fe7edc036", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-206202374", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26cb7552530047c5867347d62195121e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", 
"external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30edec9f-85", "ovs_interfaceid": "30edec9f-85c0-41f9-ab16-aea72cc18c06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.302093] env[61978]: DEBUG nova.network.neutron [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1034.348348] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.632s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.350712] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d78c0c60-7a44-4abd-954b-1a67ed8b698d tempest-ServersListShow296Test-1906221242 tempest-ServersListShow296Test-1906221242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.268s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.352351] env[61978]: INFO nova.compute.claims [None req-d78c0c60-7a44-4abd-954b-1a67ed8b698d tempest-ServersListShow296Test-1906221242 tempest-ServersListShow296Test-1906221242-project-member] [instance: c6d1c82b-4d59-4e5c-b4e5-b258cd132e6e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1034.373911] env[61978]: DEBUG oslo_concurrency.lockutils [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.392294] env[61978]: INFO nova.scheduler.client.report [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Deleted allocations for instance 8a21e6a7-c34e-4af0-b1fd-8a501694614c [ 1034.473429] env[61978]: DEBUG nova.network.neutron [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Updating instance_info_cache with network_info: [{"id": "c0a88faf-13e7-4c53-bd83-b5d1060f8d5b", "address": "fa:16:3e:40:fe:15", "network": {"id": "140282e9-127a-4f19-b6d1-6bea55474c67", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-904021562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "4a3471435d4747648cd8ddf0817d9b85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0a88faf-13", "ovs_interfaceid": "c0a88faf-13e7-4c53-bd83-b5d1060f8d5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.528805] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5f004c-36fb-4296-86f4-a3a682bfc045 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.554540] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247a5c25-b066-48d2-b0c1-8704da0f2d29 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.564974] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Updating instance 'f930ab49-c215-4b2e-92b1-21c0d52a70eb' progress to 83 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1034.609668] env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395156, 'name': CloneVM_Task} progress is 95%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.630067] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Releasing lock "refresh_cache-5d9556d2-fcdd-416f-8f16-0fb271ff4ca5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.630067] env[61978]: DEBUG nova.compute.manager [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Instance network_info: |[{"id": "0ac08d0f-ea95-4b4e-9fad-6e6e819ec94d", "address": "fa:16:3e:85:8e:47", "network": {"id": "7eec4577-9cb8-47c3-89d5-035fe7edc036", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-206202374", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.220", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26cb7552530047c5867347d62195121e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ac08d0f-ea", "ovs_interfaceid": "0ac08d0f-ea95-4b4e-9fad-6e6e819ec94d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8664e01e-1422-4709-85a0-c3684ca5733c", "address": "fa:16:3e:c5:e9:39", "network": {"id": "228d9811-3a39-4966-b633-05f07ba74cec", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1088133781", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "26cb7552530047c5867347d62195121e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8664e01e-14", "ovs_interfaceid": "8664e01e-1422-4709-85a0-c3684ca5733c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "30edec9f-85c0-41f9-ab16-aea72cc18c06", "address": "fa:16:3e:f8:50:b7", "network": {"id": "7eec4577-9cb8-47c3-89d5-035fe7edc036", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-206202374", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 
4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26cb7552530047c5867347d62195121e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30edec9f-85", "ovs_interfaceid": "30edec9f-85c0-41f9-ab16-aea72cc18c06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1034.630067] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:8e:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6685c85e-be1e-4b7b-a6cc-3e50e59b6567', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0ac08d0f-ea95-4b4e-9fad-6e6e819ec94d', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:e9:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dcf5c3f7-4e33-4f21-b323-3673930b789c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8664e01e-1422-4709-85a0-c3684ca5733c', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:50:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6685c85e-be1e-4b7b-a6cc-3e50e59b6567', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30edec9f-85c0-41f9-ab16-aea72cc18c06', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1034.644561] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Creating folder: Project (26cb7552530047c5867347d62195121e). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1034.646598] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a44f78aa-6bf1-4e59-bfdd-ab206cc88891 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.661261] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Created folder: Project (26cb7552530047c5867347d62195121e) in parent group-v295764. [ 1034.661261] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Creating folder: Instances. Parent ref: group-v295923. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1034.661261] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f4f1f79-0869-472c-a569-b5c89763a588 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.673117] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Created folder: Instances in parent group-v295923. [ 1034.673431] env[61978]: DEBUG oslo.service.loopingcall [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1034.673642] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1034.674396] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-95df6c56-ed81-413e-b1e1-a5fb024792fa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.707124] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1034.707124] env[61978]: value = "task-1395167" [ 1034.707124] env[61978]: _type = "Task" [ 1034.707124] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.718336] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395167, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.857509] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d78c0c60-7a44-4abd-954b-1a67ed8b698d tempest-ServersListShow296Test-1906221242 tempest-ServersListShow296Test-1906221242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.507s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.858539] env[61978]: DEBUG nova.compute.utils [None req-d78c0c60-7a44-4abd-954b-1a67ed8b698d tempest-ServersListShow296Test-1906221242 tempest-ServersListShow296Test-1906221242-project-member] [instance: c6d1c82b-4d59-4e5c-b4e5-b258cd132e6e] Instance c6d1c82b-4d59-4e5c-b4e5-b258cd132e6e could not be found. 
{{(pid=61978) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1034.860124] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.969s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.862174] env[61978]: INFO nova.compute.claims [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1034.865808] env[61978]: DEBUG nova.compute.manager [None req-d78c0c60-7a44-4abd-954b-1a67ed8b698d tempest-ServersListShow296Test-1906221242 tempest-ServersListShow296Test-1906221242-project-member] [instance: c6d1c82b-4d59-4e5c-b4e5-b258cd132e6e] Instance disappeared during build. {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2513}} [ 1034.866057] env[61978]: DEBUG nova.compute.manager [None req-d78c0c60-7a44-4abd-954b-1a67ed8b698d tempest-ServersListShow296Test-1906221242 tempest-ServersListShow296Test-1906221242-project-member] [instance: c6d1c82b-4d59-4e5c-b4e5-b258cd132e6e] Unplugging VIFs for instance {{(pid=61978) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1034.866423] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d78c0c60-7a44-4abd-954b-1a67ed8b698d tempest-ServersListShow296Test-1906221242 tempest-ServersListShow296Test-1906221242-project-member] Acquiring lock "refresh_cache-c6d1c82b-4d59-4e5c-b4e5-b258cd132e6e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.866640] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d78c0c60-7a44-4abd-954b-1a67ed8b698d tempest-ServersListShow296Test-1906221242 tempest-ServersListShow296Test-1906221242-project-member] Acquired lock "refresh_cache-c6d1c82b-4d59-4e5c-b4e5-b258cd132e6e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.866871] env[61978]: DEBUG nova.network.neutron [None req-d78c0c60-7a44-4abd-954b-1a67ed8b698d tempest-ServersListShow296Test-1906221242 tempest-ServersListShow296Test-1906221242-project-member] [instance: c6d1c82b-4d59-4e5c-b4e5-b258cd132e6e] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1034.905113] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2da5f792-a8e3-4bce-8792-c0746be1b7cc tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "8a21e6a7-c34e-4af0-b1fd-8a501694614c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.999s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.974909] env[61978]: DEBUG oslo_concurrency.lockutils [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Releasing lock "refresh_cache-3ee1023c-7837-4db0-88d4-f88c9a43fba3" {{(pid=61978) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.975346] env[61978]: DEBUG nova.compute.manager [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Instance network_info: |[{"id": "c0a88faf-13e7-4c53-bd83-b5d1060f8d5b", "address": "fa:16:3e:40:fe:15", "network": {"id": "140282e9-127a-4f19-b6d1-6bea55474c67", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-904021562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a3471435d4747648cd8ddf0817d9b85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0a88faf-13", "ovs_interfaceid": "c0a88faf-13e7-4c53-bd83-b5d1060f8d5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1035.072457] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1035.072895] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-30cf1035-9847-4640-aca6-194aa98c4ee9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.082626] env[61978]: DEBUG oslo_vmware.api [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 1035.082626] env[61978]: value = "task-1395168" [ 1035.082626] env[61978]: _type = "Task" [ 1035.082626] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.106940] env[61978]: DEBUG oslo_vmware.api [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395168, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.110457] env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395156, 'name': CloneVM_Task, 'duration_secs': 3.748887} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.110738] env[61978]: INFO nova.virt.vmwareapi.vmops [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Created linked-clone VM from snapshot [ 1035.111547] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4691d8b0-a62e-435e-9827-1d3debec5ec3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.122186] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Uploading image 48818170-fdfe-4bfb-b961-d9a9ec32e870 {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1035.200100] env[61978]: DEBUG nova.network.neutron [-] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.202972] env[61978]: DEBUG nova.network.neutron [-] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.218068] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395167, 'name': CreateVM_Task, 'duration_secs': 0.471689} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.218253] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1035.219390] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.219491] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.219769] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1035.220331] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17f7dc8d-a05f-400b-89c6-305001da8ede {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.226367] env[61978]: DEBUG oslo_vmware.api [None 
req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for the task: (returnval){ [ 1035.226367] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5209efeb-7e68-a25d-7aab-e460ca99c919" [ 1035.226367] env[61978]: _type = "Task" [ 1035.226367] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.236832] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5209efeb-7e68-a25d-7aab-e460ca99c919, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.372040] env[61978]: DEBUG nova.compute.utils [None req-d78c0c60-7a44-4abd-954b-1a67ed8b698d tempest-ServersListShow296Test-1906221242 tempest-ServersListShow296Test-1906221242-project-member] [instance: c6d1c82b-4d59-4e5c-b4e5-b258cd132e6e] Can not refresh info_cache because instance was not found {{(pid=61978) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 1035.390445] env[61978]: DEBUG nova.network.neutron [None req-d78c0c60-7a44-4abd-954b-1a67ed8b698d tempest-ServersListShow296Test-1906221242 tempest-ServersListShow296Test-1906221242-project-member] [instance: c6d1c82b-4d59-4e5c-b4e5-b258cd132e6e] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1035.455718] env[61978]: DEBUG nova.network.neutron [None req-d78c0c60-7a44-4abd-954b-1a67ed8b698d tempest-ServersListShow296Test-1906221242 tempest-ServersListShow296Test-1906221242-project-member] [instance: c6d1c82b-4d59-4e5c-b4e5-b258cd132e6e] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.596087] env[61978]: DEBUG oslo_vmware.api [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395168, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.703204] env[61978]: INFO nova.compute.manager [-] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Took 1.87 seconds to deallocate network for instance. [ 1035.705309] env[61978]: INFO nova.compute.manager [-] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Took 1.87 seconds to deallocate network for instance. [ 1035.738870] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5209efeb-7e68-a25d-7aab-e460ca99c919, 'name': SearchDatastore_Task, 'duration_secs': 0.013492} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.739259] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.739524] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1035.739793] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.739960] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.740169] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1035.740547] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3586dab1-47cd-4cee-abcf-2fd6c96a2f8d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.751908] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1035.752143] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1035.752989] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad47fdbd-9964-4086-9f88-ef57006b675b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.760674] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for the task: (returnval){ [ 1035.760674] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52caaeba-8bf0-b338-c8a2-9c392252cc6b" [ 1035.760674] env[61978]: _type = "Task" [ 1035.760674] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.773889] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52caaeba-8bf0-b338-c8a2-9c392252cc6b, 'name': SearchDatastore_Task, 'duration_secs': 0.011458} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.774848] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-335692e4-9e98-473d-acb9-08228312bc96 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.781244] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for the task: (returnval){ [ 1035.781244] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c713cb-71f6-ed7a-b786-c562cbdb7ad3" [ 1035.781244] env[61978]: _type = "Task" [ 1035.781244] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.790852] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c713cb-71f6-ed7a-b786-c562cbdb7ad3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.958558] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d78c0c60-7a44-4abd-954b-1a67ed8b698d tempest-ServersListShow296Test-1906221242 tempest-ServersListShow296Test-1906221242-project-member] Releasing lock "refresh_cache-c6d1c82b-4d59-4e5c-b4e5-b258cd132e6e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.959490] env[61978]: DEBUG nova.compute.manager [None req-d78c0c60-7a44-4abd-954b-1a67ed8b698d tempest-ServersListShow296Test-1906221242 tempest-ServersListShow296Test-1906221242-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61978) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1035.960568] env[61978]: DEBUG nova.compute.manager [None req-d78c0c60-7a44-4abd-954b-1a67ed8b698d tempest-ServersListShow296Test-1906221242 tempest-ServersListShow296Test-1906221242-project-member] [instance: c6d1c82b-4d59-4e5c-b4e5-b258cd132e6e] Skipping network deallocation for instance since networking was not requested. {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1036.099609] env[61978]: DEBUG oslo_vmware.api [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395168, 'name': PowerOnVM_Task, 'duration_secs': 0.552348} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.100231] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1036.100559] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-955df5d1-75ab-495a-86e4-c717103ed039 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Updating instance 'f930ab49-c215-4b2e-92b1-21c0d52a70eb' progress to 100 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1036.216565] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.218271] env[61978]: DEBUG oslo_concurrency.lockutils [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.301230] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c713cb-71f6-ed7a-b786-c562cbdb7ad3, 'name': SearchDatastore_Task, 'duration_secs': 0.010753} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.302078] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.303037] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5/5d9556d2-fcdd-416f-8f16-0fb271ff4ca5.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1036.303471] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3cac8564-1a0c-4e83-8b69-93fb886bf7c6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.319497] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for the task: (returnval){ [ 1036.319497] env[61978]: value = "task-1395169" [ 1036.319497] env[61978]: _type = "Task" [ 1036.319497] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.327669] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5362f6c-252b-4634-84da-9f2a42f12ee1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.340733] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395169, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.343591] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee07f4d6-a36e-4d82-82e3-f8123bfca033 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.379515] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea226e3-6d9b-4f64-b3ed-5f1d05c237f9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.388827] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ab781d-f566-4367-8266-b7408e22597f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.405515] env[61978]: DEBUG nova.compute.provider_tree [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1036.831770] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395169, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.909195] env[61978]: DEBUG nova.scheduler.client.report [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1036.982373] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d78c0c60-7a44-4abd-954b-1a67ed8b698d tempest-ServersListShow296Test-1906221242 tempest-ServersListShow296Test-1906221242-project-member] Lock "c6d1c82b-4d59-4e5c-b4e5-b258cd132e6e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.926s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.331045] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395169, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.550559} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.331365] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5/5d9556d2-fcdd-416f-8f16-0fb271ff4ca5.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1037.331593] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1037.331876] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b860ffee-54f7-4b67-9c78-672822c2aee7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.341059] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for the task: (returnval){ [ 1037.341059] env[61978]: value = "task-1395170" [ 1037.341059] env[61978]: _type = "Task" [ 1037.341059] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.352451] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395170, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.416639] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.556s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.417286] env[61978]: DEBUG nova.compute.manager [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1037.420197] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 12.902s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.853441] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395170, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071594} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.853620] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1037.854533] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b74d7c-192a-454f-a309-dd52ec867815 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.884772] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5/5d9556d2-fcdd-416f-8f16-0fb271ff4ca5.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1037.885170] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d7bbd25-a337-4fa3-a167-09bc4c8b635a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.907506] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for the task: (returnval){ [ 1037.907506] env[61978]: value = "task-1395171" [ 1037.907506] env[61978]: _type = "Task" [ 1037.907506] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.917406] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395171, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.924271] env[61978]: DEBUG nova.compute.utils [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1037.937602] env[61978]: DEBUG nova.compute.manager [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1037.941175] env[61978]: DEBUG nova.compute.manager [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1037.941398] env[61978]: DEBUG nova.network.neutron [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1038.004407] env[61978]: DEBUG nova.policy [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '031a98d4e0f345c28fd226142db35516', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86f4ae0b29af4ee2b33e5a499cf1e899', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1038.183217] env[61978]: DEBUG nova.compute.manager [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1038.184143] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17abda19-388f-4cab-85d2-7ec37bfb71b4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.213240] env[61978]: DEBUG nova.virt.hardware [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1038.213240] env[61978]: DEBUG nova.virt.hardware [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1038.213476] env[61978]: DEBUG nova.virt.hardware [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1038.213532] env[61978]: DEBUG nova.virt.hardware [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1038.213727] env[61978]: DEBUG nova.virt.hardware [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1038.213906] env[61978]: DEBUG nova.virt.hardware [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1038.215400] env[61978]: DEBUG nova.virt.hardware [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1038.215400] env[61978]: DEBUG nova.virt.hardware [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1038.215400] env[61978]: DEBUG nova.virt.hardware [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1038.215400] env[61978]: DEBUG nova.virt.hardware [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 
1038.215400] env[61978]: DEBUG nova.virt.hardware [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1038.218412] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e55a21-4287-4116-8a75-03e04acd64ae {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.235409] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de627625-a5da-4c59-bc00-adc48802c77d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.243684] env[61978]: DEBUG oslo_vmware.rw_handles [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1038.243684] env[61978]: value = "vm-295922" [ 1038.243684] env[61978]: _type = "VirtualMachine" [ 1038.243684] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1038.244772] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-8fe8916f-0df2-4407-b677-fede9f2bbf96 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.254439] env[61978]: DEBUG oslo_vmware.rw_handles [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fb09cd-004b-e7f9-a9ff-547a5df63e34/disk-0.vmdk. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1038.255705] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab07ebc-3f41-419b-8f69-6ce9c7e109b6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.267974] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:fe:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eed34ae1-5f7f-4deb-9db8-85eaa1e60c29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c0a88faf-13e7-4c53-bd83-b5d1060f8d5b', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1038.276070] env[61978]: DEBUG oslo.service.loopingcall [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1038.276479] env[61978]: DEBUG oslo_vmware.rw_handles [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lease: (returnval){ [ 1038.276479] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ee97c6-6b48-fc51-a2f2-bb4f82ab60ff" [ 1038.276479] env[61978]: _type = "HttpNfcLease" [ 1038.276479] env[61978]: } obtained for exporting VM: (result){ [ 1038.276479] env[61978]: value = "vm-295922" [ 1038.276479] env[61978]: _type = "VirtualMachine" [ 1038.276479] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1038.276752] env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the lease: (returnval){ [ 1038.276752] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ee97c6-6b48-fc51-a2f2-bb4f82ab60ff" [ 1038.276752] env[61978]: _type = "HttpNfcLease" [ 1038.276752] env[61978]: } to be ready. {{(pid=61978) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1038.277486] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1038.277787] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eabc25f8-0621-4038-93b2-e1cbdcd62e33 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.298927] env[61978]: DEBUG oslo_vmware.rw_handles [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fb09cd-004b-e7f9-a9ff-547a5df63e34/disk-0.vmdk is in state: ready. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1038.298927] env[61978]: ERROR oslo_vmware.rw_handles [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fb09cd-004b-e7f9-a9ff-547a5df63e34/disk-0.vmdk due to incomplete transfer. [ 1038.299275] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2d22d741-8717-484e-a3ab-a04b1a9d4976 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.303163] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1038.303163] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ee97c6-6b48-fc51-a2f2-bb4f82ab60ff" [ 1038.303163] env[61978]: _type = "HttpNfcLease" [ 1038.303163] env[61978]: } is ready. 
{{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1038.303454] env[61978]: DEBUG oslo_vmware.rw_handles [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1038.303454] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ee97c6-6b48-fc51-a2f2-bb4f82ab60ff" [ 1038.303454] env[61978]: _type = "HttpNfcLease" [ 1038.303454] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1038.304243] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8feaadaf-917d-488e-9044-05bb1d75313e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.308638] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1038.308638] env[61978]: value = "task-1395173" [ 1038.308638] env[61978]: _type = "Task" [ 1038.308638] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.318480] env[61978]: DEBUG nova.network.neutron [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Successfully created port: fdf95a42-1379-4895-9a94-f8a8cf1d070d {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1038.320719] env[61978]: DEBUG oslo_vmware.rw_handles [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fb09cd-004b-e7f9-a9ff-547a5df63e34/disk-0.vmdk. {{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1038.320923] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Uploaded image e94fc8cc-cba8-45f1-a46f-dfb130fa6e5a to the Glance image server {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1038.323487] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Destroying the VM {{(pid=61978) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1038.323800] env[61978]: DEBUG oslo_vmware.rw_handles [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52299151-28a8-4440-ab0f-9e7c88999c5d/disk-0.vmdk from lease info. 
{{(pid=61978) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1038.323973] env[61978]: DEBUG oslo_vmware.rw_handles [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52299151-28a8-4440-ab0f-9e7c88999c5d/disk-0.vmdk for reading. {{(pid=61978) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1038.325580] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8ec8d1e2-b904-4cc4-a6cf-b5f90dabc3a1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.332086] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395173, 'name': CreateVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.427464] env[61978]: DEBUG nova.compute.manager [req-1a72a385-b3de-499a-9bf8-4f23f7accb0e req-6b4ea8bb-455b-4777-a2a8-07f0b6020ff7 service nova] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Received event network-vif-unplugged-1682c3e8-c35b-4055-90d6-a236d4439ee1 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1038.427850] env[61978]: DEBUG oslo_concurrency.lockutils [req-1a72a385-b3de-499a-9bf8-4f23f7accb0e req-6b4ea8bb-455b-4777-a2a8-07f0b6020ff7 service nova] Acquiring lock "8f609401-af09-4291-a1e7-a356fbc4aac9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.428127] env[61978]: DEBUG oslo_concurrency.lockutils [req-1a72a385-b3de-499a-9bf8-4f23f7accb0e req-6b4ea8bb-455b-4777-a2a8-07f0b6020ff7 service nova] Lock "8f609401-af09-4291-a1e7-a356fbc4aac9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.428414] env[61978]: DEBUG oslo_concurrency.lockutils [req-1a72a385-b3de-499a-9bf8-4f23f7accb0e req-6b4ea8bb-455b-4777-a2a8-07f0b6020ff7 service nova] Lock "8f609401-af09-4291-a1e7-a356fbc4aac9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1038.428810] env[61978]: DEBUG nova.compute.manager [req-1a72a385-b3de-499a-9bf8-4f23f7accb0e req-6b4ea8bb-455b-4777-a2a8-07f0b6020ff7 service nova] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] No waiting events found dispatching network-vif-unplugged-1682c3e8-c35b-4055-90d6-a236d4439ee1 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1038.428964] env[61978]: WARNING nova.compute.manager [req-1a72a385-b3de-499a-9bf8-4f23f7accb0e req-6b4ea8bb-455b-4777-a2a8-07f0b6020ff7 service nova] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Received unexpected event network-vif-unplugged-1682c3e8-c35b-4055-90d6-a236d4439ee1 for instance with vm_state shelved_offloaded and task_state None. 
[ 1038.429989] env[61978]: DEBUG oslo_vmware.api [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 1038.429989] env[61978]: value = "task-1395174" [ 1038.429989] env[61978]: _type = "Task" [ 1038.429989] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.443615] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395171, 'name': ReconfigVM_Task, 'duration_secs': 0.389011} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.444417] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5/5d9556d2-fcdd-416f-8f16-0fb271ff4ca5.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1038.449892] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Applying migration context for instance f930ab49-c215-4b2e-92b1-21c0d52a70eb as it has an incoming, in-progress migration 3552715b-f1cf-4686-a31b-df98ffe8a8b8. Migration status is finished {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1038.452469] env[61978]: INFO nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Updating resource usage from migration 3552715b-f1cf-4686-a31b-df98ffe8a8b8 [ 1038.454803] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-40853eb9-f672-44ac-b169-aaa37ee83cce {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.463940] env[61978]: DEBUG oslo_vmware.api [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395174, 'name': Destroy_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.471500] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for the task: (returnval){ [ 1038.471500] env[61978]: value = "task-1395175" [ 1038.471500] env[61978]: _type = "Task" [ 1038.471500] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.476362] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 92eb5edb-803b-48d4-8c4f-338d7c3b3d13 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.476362] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 243e7146-46fc-43f4-a83b-cdc58f397f9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.476362] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 85fc5af8-454d-4042-841a-945b7e84eb6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.476362] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance eb7cb200-c162-4e92-8916-6d9abd5cf34d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.476362] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 96bef3f3-a45c-43ba-a86a-66c1d5686ea6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.476362] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance b26a4784-698d-477a-8db7-58156899d231 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.476822] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 50788030-4dc2-4215-bf2c-acba5dd33ce4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.476822] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance c17c986e-c008-4414-8dd1-4ea836458048 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.476822] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance f3c837fb-be7e-40a6-aae4-7f213c62ab2c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.476822] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.476972] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1038.481627] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.481627] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.481627] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance bdfdd685-e440-4f53-b6c4-2ee2f06acba8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.481627] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance f1001633-e4e5-4de1-8a6b-cf653e43d821 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.481627] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.481627] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance b356fc81-f857-4416-8eb0-28c66d137967 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.481627] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance cb004a19-0048-4766-af7c-0fbde867f422 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.481627] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1038.481627] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 4c7053ee-7c44-49ee-8d30-bf14686c6b1c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.481627] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 0d48ae5d-7cc8-42b3-a993-44636e9cb171 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.481627] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 7e71c8de-1f94-4161-8ad8-a67792c5ce24 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.481627] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1038.481627] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance adf25af8-28c4-444e-b849-88d643f57dcf is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1038.481627] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.482617] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Migration 3552715b-f1cf-4686-a31b-df98ffe8a8b8 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1038.482617] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance f930ab49-c215-4b2e-92b1-21c0d52a70eb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.482617] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 3ee1023c-7837-4db0-88d4-f88c9a43fba3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.482617] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 9ee04ee8-98ec-4be9-935d-cad7cd176466 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1038.482617] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 25 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1038.482617] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=5440MB phys_disk=200GB used_disk=26GB total_vcpus=48 used_vcpus=25 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1038.486816] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9a3920f8-33fa-4181-b077-c4bc04700ed9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.496237] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395175, 'name': Rename_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.696890] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1038.697217] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2e940d3c-55f4-466f-ad49-44b1833b38e8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.707147] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1038.707147] env[61978]: value = "task-1395176" [ 1038.707147] env[61978]: _type = "Task" [ 1038.707147] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.717021] env[61978]: DEBUG nova.compute.manager [req-9222f0e9-1b50-46a7-938b-ee405d285150 req-970a3008-61b6-472a-a9cf-52063062061c service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Received event network-vif-plugged-30edec9f-85c0-41f9-ab16-aea72cc18c06 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1038.717205] env[61978]: DEBUG oslo_concurrency.lockutils [req-9222f0e9-1b50-46a7-938b-ee405d285150 req-970a3008-61b6-472a-a9cf-52063062061c service nova] Acquiring lock "5d9556d2-fcdd-416f-8f16-0fb271ff4ca5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.717440] env[61978]: DEBUG oslo_concurrency.lockutils [req-9222f0e9-1b50-46a7-938b-ee405d285150 req-970a3008-61b6-472a-a9cf-52063062061c service nova] Lock "5d9556d2-fcdd-416f-8f16-0fb271ff4ca5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.717685] env[61978]: DEBUG oslo_concurrency.lockutils [req-9222f0e9-1b50-46a7-938b-ee405d285150 req-970a3008-61b6-472a-a9cf-52063062061c service nova] Lock "5d9556d2-fcdd-416f-8f16-0fb271ff4ca5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1038.717957] env[61978]: DEBUG nova.compute.manager [req-9222f0e9-1b50-46a7-938b-ee405d285150 req-970a3008-61b6-472a-a9cf-52063062061c service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] No waiting events found dispatching network-vif-plugged-30edec9f-85c0-41f9-ab16-aea72cc18c06 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1038.718149] env[61978]: WARNING nova.compute.manager [req-9222f0e9-1b50-46a7-938b-ee405d285150 req-970a3008-61b6-472a-a9cf-52063062061c service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Received unexpected event network-vif-plugged-30edec9f-85c0-41f9-ab16-aea72cc18c06 for instance with vm_state building and task_state spawning. 
[ 1038.718354] env[61978]: DEBUG nova.compute.manager [req-9222f0e9-1b50-46a7-938b-ee405d285150 req-970a3008-61b6-472a-a9cf-52063062061c service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Received event network-changed-30edec9f-85c0-41f9-ab16-aea72cc18c06 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1038.718648] env[61978]: DEBUG nova.compute.manager [req-9222f0e9-1b50-46a7-938b-ee405d285150 req-970a3008-61b6-472a-a9cf-52063062061c service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Refreshing instance network info cache due to event network-changed-30edec9f-85c0-41f9-ab16-aea72cc18c06. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1038.719007] env[61978]: DEBUG oslo_concurrency.lockutils [req-9222f0e9-1b50-46a7-938b-ee405d285150 req-970a3008-61b6-472a-a9cf-52063062061c service nova] Acquiring lock "refresh_cache-5d9556d2-fcdd-416f-8f16-0fb271ff4ca5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1038.719237] env[61978]: DEBUG oslo_concurrency.lockutils [req-9222f0e9-1b50-46a7-938b-ee405d285150 req-970a3008-61b6-472a-a9cf-52063062061c service nova] Acquired lock "refresh_cache-5d9556d2-fcdd-416f-8f16-0fb271ff4ca5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.719445] env[61978]: DEBUG nova.network.neutron [req-9222f0e9-1b50-46a7-938b-ee405d285150 req-970a3008-61b6-472a-a9cf-52063062061c service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Refreshing network info cache for port 30edec9f-85c0-41f9-ab16-aea72cc18c06 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1038.728509] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] VM already powered off {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1038.728824] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1038.729741] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43230a01-5bd4-42ba-9ed0-dfc06562c1b5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.743641] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1038.744914] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05b00173-1301-4bad-a79a-b4fc7ee43974 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.828024] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395173, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.852875] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1038.852875] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1038.853605] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Deleting the datastore file [datastore2] 7e71c8de-1f94-4161-8ad8-a67792c5ce24 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1038.853888] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e4c35e2a-078c-42da-a25c-d79792e758bd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.862724] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1038.862724] env[61978]: value = "task-1395178" [ 1038.862724] env[61978]: _type = "Task" [ 1038.862724] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.874093] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395178, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.943183] env[61978]: DEBUG oslo_vmware.api [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395174, 'name': Destroy_Task} progress is 33%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.959323] env[61978]: DEBUG nova.compute.manager [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1038.988964] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395175, 'name': Rename_Task, 'duration_secs': 0.299486} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.989268] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1038.989729] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-71bde5c0-5ee8-4775-9b36-97cf01ab05d7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.001466] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for the task: (returnval){ [ 1039.001466] env[61978]: value = "task-1395179" [ 1039.001466] env[61978]: _type = "Task" [ 1039.001466] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.017560] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395179, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.021165] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420b0d54-4b86-4025-be65-6292e10964c0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.028553] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43105b2-e282-418e-992f-08497c88698b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.072331] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa48b31-b2ea-4f5b-a778-bb23f3e818f6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.082303] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b2d8766-2c8b-4dc8-80f7-b34e2441cced {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.099967] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1039.322693] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395173, 'name': CreateVM_Task, 'duration_secs': 0.559168} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.323008] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1039.323854] env[61978]: DEBUG oslo_concurrency.lockutils [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.324217] env[61978]: DEBUG oslo_concurrency.lockutils [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.324980] env[61978]: DEBUG oslo_concurrency.lockutils [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1039.327626] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0c85d24-5268-4ff7-8311-297f5ae58fff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.334559] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 1039.334559] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5285316d-f946-2a30-7a69-79f163d6bdff" [ 1039.334559] env[61978]: _type = "Task" [ 1039.334559] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.344600] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5285316d-f946-2a30-7a69-79f163d6bdff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.378403] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395178, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.322673} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.378691] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1039.378888] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1039.379077] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1039.443633] env[61978]: DEBUG oslo_vmware.api [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395174, 'name': Destroy_Task, 'duration_secs': 0.659857} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.443924] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Destroyed the VM [ 1039.444207] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Deleting Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1039.444602] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-394e4934-74f6-463b-88f3-93e5d9835156 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.453320] env[61978]: DEBUG oslo_vmware.api [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 1039.453320] env[61978]: value = "task-1395180" [ 1039.453320] env[61978]: _type = "Task" [ 1039.453320] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.464569] env[61978]: DEBUG oslo_vmware.api [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395180, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.492132] env[61978]: DEBUG nova.network.neutron [req-9222f0e9-1b50-46a7-938b-ee405d285150 req-970a3008-61b6-472a-a9cf-52063062061c service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Updated VIF entry in instance network info cache for port 30edec9f-85c0-41f9-ab16-aea72cc18c06. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1039.493418] env[61978]: DEBUG nova.network.neutron [req-9222f0e9-1b50-46a7-938b-ee405d285150 req-970a3008-61b6-472a-a9cf-52063062061c service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Updating instance_info_cache with network_info: [{"id": "0ac08d0f-ea95-4b4e-9fad-6e6e819ec94d", "address": "fa:16:3e:85:8e:47", "network": {"id": "7eec4577-9cb8-47c3-89d5-035fe7edc036", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-206202374", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.220", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26cb7552530047c5867347d62195121e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ac08d0f-ea", "ovs_interfaceid": "0ac08d0f-ea95-4b4e-9fad-6e6e819ec94d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8664e01e-1422-4709-85a0-c3684ca5733c", "address": "fa:16:3e:c5:e9:39", "network": {"id": "228d9811-3a39-4966-b633-05f07ba74cec", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1088133781", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "26cb7552530047c5867347d62195121e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8664e01e-14", "ovs_interfaceid": "8664e01e-1422-4709-85a0-c3684ca5733c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "30edec9f-85c0-41f9-ab16-aea72cc18c06", "address": "fa:16:3e:f8:50:b7", "network": {"id": "7eec4577-9cb8-47c3-89d5-035fe7edc036", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-206202374", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26cb7552530047c5867347d62195121e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30edec9f-85", "ovs_interfaceid": "30edec9f-85c0-41f9-ab16-aea72cc18c06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.514163] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395179, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.606884] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1039.616786] env[61978]: DEBUG nova.virt.hardware [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1039.618065] env[61978]: DEBUG nova.virt.hardware [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1039.618065] env[61978]: DEBUG nova.virt.hardware [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 
1039.618065] env[61978]: DEBUG nova.virt.hardware [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1039.618065] env[61978]: DEBUG nova.virt.hardware [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1039.618065] env[61978]: DEBUG nova.virt.hardware [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1039.618065] env[61978]: DEBUG nova.virt.hardware [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1039.618336] env[61978]: DEBUG nova.virt.hardware [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1039.618506] env[61978]: DEBUG nova.virt.hardware [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1039.618719] env[61978]: DEBUG nova.virt.hardware [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1039.618960] env[61978]: DEBUG nova.virt.hardware [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1039.621434] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea829b2-7a82-45a1-86e4-f76879f3c29e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.632862] env[61978]: DEBUG oslo_vmware.rw_handles [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ad4bd2-2436-bc03-6fc9-eea4a9f92281/disk-0.vmdk. 
{{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1039.634625] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e710eeb-f21b-4e33-b8df-cad120589435 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.639504] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd8841d-5d03-44b5-b822-e35ad8c3b20a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.652257] env[61978]: DEBUG oslo_vmware.rw_handles [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ad4bd2-2436-bc03-6fc9-eea4a9f92281/disk-0.vmdk is in state: ready. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1039.652377] env[61978]: ERROR oslo_vmware.rw_handles [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ad4bd2-2436-bc03-6fc9-eea4a9f92281/disk-0.vmdk due to incomplete transfer. [ 1039.663044] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-341363f2-9a70-4aeb-a239-021fb5760174 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.672238] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "50788030-4dc2-4215-bf2c-acba5dd33ce4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.674605] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "50788030-4dc2-4215-bf2c-acba5dd33ce4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.674605] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "50788030-4dc2-4215-bf2c-acba5dd33ce4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.674605] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "50788030-4dc2-4215-bf2c-acba5dd33ce4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.674605] 
env[61978]: DEBUG oslo_concurrency.lockutils [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "50788030-4dc2-4215-bf2c-acba5dd33ce4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.678749] env[61978]: DEBUG oslo_vmware.rw_handles [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ad4bd2-2436-bc03-6fc9-eea4a9f92281/disk-0.vmdk. {{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1039.678964] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Uploaded image fefd40fb-471f-4299-8d1e-84e9dcba19c2 to the Glance image server {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1039.681318] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Destroying the VM {{(pid=61978) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1039.682171] env[61978]: INFO nova.compute.manager [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Terminating instance [ 1039.684123] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-eed8bee2-125a-444b-81c0-549d6f6a050c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.686768] env[61978]: DEBUG nova.compute.manager [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1039.687065] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1039.688466] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d892742d-fc46-4734-89cf-cae4479b6215 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.700909] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1039.702092] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e9c34148-320c-433f-a0da-32e35d127fb5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.705376] env[61978]: DEBUG oslo_vmware.api [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1039.705376] env[61978]: value = "task-1395181" [ 1039.705376] env[61978]: _type = "Task" [ 1039.705376] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.714830] env[61978]: DEBUG oslo_vmware.api [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 1039.714830] env[61978]: value = "task-1395182" [ 1039.714830] env[61978]: _type = "Task" [ 1039.714830] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.719647] env[61978]: DEBUG oslo_vmware.api [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395181, 'name': Destroy_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.732212] env[61978]: DEBUG oslo_vmware.api [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395182, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.846259] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5285316d-f946-2a30-7a69-79f163d6bdff, 'name': SearchDatastore_Task, 'duration_secs': 0.014891} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.846668] env[61978]: DEBUG oslo_concurrency.lockutils [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1039.847295] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1039.847390] env[61978]: DEBUG oslo_concurrency.lockutils [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.847557] env[61978]: DEBUG oslo_concurrency.lockutils [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.847863] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1039.848259] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f620475d-5178-4c95-869f-e893ea179bb4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.860649] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1039.861095] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1039.861887] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25286a05-7db8-49d6-8c6d-60ba3387422e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.868699] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 1039.868699] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]521aadf4-f7f2-9713-25aa-6500245a6fdb" [ 1039.868699] env[61978]: _type = "Task" [ 1039.868699] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.886746] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]521aadf4-f7f2-9713-25aa-6500245a6fdb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.968998] env[61978]: DEBUG oslo_vmware.api [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395180, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.990137] env[61978]: DEBUG nova.network.neutron [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Port fcd64700-31ef-4310-8986-b22e515b1c55 binding to destination host cpu-1 is already ACTIVE {{(pid=61978) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1039.990137] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "refresh_cache-f930ab49-c215-4b2e-92b1-21c0d52a70eb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.990267] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquired lock "refresh_cache-f930ab49-c215-4b2e-92b1-21c0d52a70eb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.990939] env[61978]: DEBUG nova.network.neutron [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1039.999276] env[61978]: DEBUG oslo_concurrency.lockutils [req-9222f0e9-1b50-46a7-938b-ee405d285150 req-970a3008-61b6-472a-a9cf-52063062061c service nova] Releasing lock "refresh_cache-5d9556d2-fcdd-416f-8f16-0fb271ff4ca5" {{(pid=61978) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.019746] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395179, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.042542] env[61978]: DEBUG nova.network.neutron [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Successfully updated port: fdf95a42-1379-4895-9a94-f8a8cf1d070d {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1040.118894] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1040.122245] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.699s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.122245] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.382s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.122245] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.122245] env[61978]: DEBUG oslo_concurrency.lockutils [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.277s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.122245] env[61978]: DEBUG oslo_concurrency.lockutils [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.124640] env[61978]: DEBUG oslo_concurrency.lockutils [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.751s 
{{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.124940] env[61978]: DEBUG oslo_concurrency.lockutils [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.128227] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.912s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.128300] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.131266] env[61978]: DEBUG oslo_concurrency.lockutils [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.913s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.131555] env[61978]: DEBUG oslo_concurrency.lockutils [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.154927] env[61978]: INFO nova.scheduler.client.report [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Deleted allocations for instance adf25af8-28c4-444e-b849-88d643f57dcf [ 1040.156354] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8a5298c-9df8-4635-8cd8-d5f02f127389 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "8f609401-af09-4291-a1e7-a356fbc4aac9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.164661] env[61978]: INFO nova.scheduler.client.report [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Deleted allocations for instance dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56 [ 1040.182510] env[61978]: INFO nova.scheduler.client.report [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Deleted allocations for instance 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5 [ 1040.200713] env[61978]: 
INFO nova.scheduler.client.report [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Deleted allocations for instance aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba [ 1040.219296] env[61978]: DEBUG oslo_vmware.api [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395181, 'name': Destroy_Task, 'duration_secs': 0.414731} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.219296] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Destroyed the VM [ 1040.219380] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Deleting Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1040.219669] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5b75f83d-95ff-42d0-b50d-a76e9924f90c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.232315] env[61978]: DEBUG oslo_vmware.api [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395182, 'name': PowerOffVM_Task, 'duration_secs': 0.243937} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.234146] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1040.234907] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1040.234907] env[61978]: DEBUG oslo_vmware.api [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1040.234907] env[61978]: value = "task-1395183" [ 1040.234907] env[61978]: _type = "Task" [ 1040.234907] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.235197] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-842c8516-36d1-4d9f-b9f0-5117ced8a1c5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.248746] env[61978]: DEBUG oslo_vmware.api [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395183, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.315257] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1040.315482] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1040.315733] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Deleting the datastore file [datastore2] 50788030-4dc2-4215-bf2c-acba5dd33ce4 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1040.316085] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88e313c9-ecd2-4f55-8cf1-557b695f64b3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.325521] env[61978]: DEBUG oslo_vmware.api [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 1040.325521] env[61978]: value = "task-1395185" [ 1040.325521] env[61978]: _type = "Task" [ 1040.325521] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.335284] env[61978]: DEBUG oslo_vmware.api [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395185, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.381423] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]521aadf4-f7f2-9713-25aa-6500245a6fdb, 'name': SearchDatastore_Task, 'duration_secs': 0.017241} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.382387] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f511c2b1-979f-45b9-8702-57d61ba116ae {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.393501] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 1040.393501] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5254f62b-b149-567c-a352-d83717abe4d1" [ 1040.393501] env[61978]: _type = "Task" [ 1040.393501] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.404832] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5254f62b-b149-567c-a352-d83717abe4d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.431834] env[61978]: DEBUG nova.virt.hardware [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1040.432035] env[61978]: DEBUG nova.virt.hardware [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1040.432192] env[61978]: DEBUG nova.virt.hardware [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1040.432449] env[61978]: DEBUG nova.virt.hardware [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1040.432602] env[61978]: DEBUG nova.virt.hardware [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] 
Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1040.432779] env[61978]: DEBUG nova.virt.hardware [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1040.433064] env[61978]: DEBUG nova.virt.hardware [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1040.433279] env[61978]: DEBUG nova.virt.hardware [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1040.433472] env[61978]: DEBUG nova.virt.hardware [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1040.433661] env[61978]: DEBUG nova.virt.hardware [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1040.433861] env[61978]: DEBUG nova.virt.hardware [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1040.434864] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1680c8-4bc8-473c-a80b-c82293dc3294 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.444889] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f5c6fb0-57cd-45cd-ba69-71e902ecaec4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.464756] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:10:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f676fff-3b85-4a9d-b8f4-68c0ea6eda7f', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1040.472287] env[61978]: DEBUG oslo.service.loopingcall [None 
req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1040.475945] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1040.476353] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f23943df-0305-436b-99a1-0dbff6c71bf0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.499514] env[61978]: DEBUG oslo_vmware.api [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395180, 'name': RemoveSnapshot_Task, 'duration_secs': 0.695116} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.502097] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Deleted Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1040.502097] env[61978]: INFO nova.compute.manager [None req-2af861b4-94b7-4836-8601-8c8ec9e36bc9 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Took 15.14 seconds to snapshot the instance on the hypervisor. [ 1040.506034] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1040.506034] env[61978]: value = "task-1395186" [ 1040.506034] env[61978]: _type = "Task" [ 1040.506034] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.519685] env[61978]: DEBUG oslo_vmware.api [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395179, 'name': PowerOnVM_Task, 'duration_secs': 1.435597} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.519893] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395186, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.520189] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1040.520408] env[61978]: INFO nova.compute.manager [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Took 14.05 seconds to spawn the instance on the hypervisor. 
[ 1040.520606] env[61978]: DEBUG nova.compute.manager [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1040.521517] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb65c579-2dc0-4c2b-91ff-6a10c2df7e37 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.546538] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "refresh_cache-9ee04ee8-98ec-4be9-935d-cad7cd176466" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1040.546784] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquired lock "refresh_cache-9ee04ee8-98ec-4be9-935d-cad7cd176466" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.547066] env[61978]: DEBUG nova.network.neutron [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1040.642902] env[61978]: DEBUG oslo_concurrency.lockutils [None req-713be77f-228c-4350-acdc-6d46a7874ffa tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "8f609401-af09-4291-a1e7-a356fbc4aac9" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 27.991s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.642902] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8a5298c-9df8-4635-8cd8-d5f02f127389 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "8f609401-af09-4291-a1e7-a356fbc4aac9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.485s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.642902] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8a5298c-9df8-4635-8cd8-d5f02f127389 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "8f609401-af09-4291-a1e7-a356fbc4aac9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.642902] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8a5298c-9df8-4635-8cd8-d5f02f127389 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "8f609401-af09-4291-a1e7-a356fbc4aac9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.642902] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8a5298c-9df8-4635-8cd8-d5f02f127389 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "8f609401-af09-4291-a1e7-a356fbc4aac9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.645118] env[61978]: INFO nova.compute.manager [None req-b8a5298c-9df8-4635-8cd8-d5f02f127389 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Terminating instance [ 1040.649159] env[61978]: DEBUG nova.compute.manager [None req-b8a5298c-9df8-4635-8cd8-d5f02f127389 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1040.649603] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a5298c-9df8-4635-8cd8-d5f02f127389 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1040.650340] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2b51a34c-f1a8-45ad-93ff-f28ae64d826d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.665847] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5085b38a-d53b-4880-8bc7-20bec6900a52 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "adf25af8-28c4-444e-b849-88d643f57dcf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.904s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.672474] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0553d168-b93a-465a-91c8-3a91d17c636c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.693324] env[61978]: DEBUG oslo_concurrency.lockutils [None req-213bb15c-ab36-46dc-b843-826873f5f5df tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.457s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.697246] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquiring lock "0cdff646-34ad-49d5-b775-28e8e7ce778e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.697864] env[61978]: DEBUG oslo_concurrency.lockutils [None 
req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lock "0cdff646-34ad-49d5-b775-28e8e7ce778e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.702580] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d902f49b-4d2a-4a76-bd22-8ee2ffc32877 tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "3a30ecc4-455f-49cf-98e8-d38be6a1c5a5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.421s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.710669] env[61978]: DEBUG oslo_concurrency.lockutils [None req-17fdd9c0-59a5-4f7c-8d81-900d388a67f7 tempest-AttachInterfacesV270Test-558254141 tempest-AttachInterfacesV270Test-558254141-project-member] Lock "aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.552s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.742912] env[61978]: WARNING nova.virt.vmwareapi.vmops [None req-b8a5298c-9df8-4635-8cd8-d5f02f127389 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8f609401-af09-4291-a1e7-a356fbc4aac9 could not be found. [ 1040.743161] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a5298c-9df8-4635-8cd8-d5f02f127389 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1040.743343] env[61978]: INFO nova.compute.manager [None req-b8a5298c-9df8-4635-8cd8-d5f02f127389 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Took 0.09 seconds to destroy the instance on the hypervisor. [ 1040.743653] env[61978]: DEBUG oslo.service.loopingcall [None req-b8a5298c-9df8-4635-8cd8-d5f02f127389 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1040.747523] env[61978]: DEBUG nova.compute.manager [-] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1040.747722] env[61978]: DEBUG nova.network.neutron [-] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1040.760949] env[61978]: DEBUG oslo_vmware.api [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395183, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.800407] env[61978]: DEBUG nova.compute.manager [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Received event network-vif-plugged-c0a88faf-13e7-4c53-bd83-b5d1060f8d5b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1040.800799] env[61978]: DEBUG oslo_concurrency.lockutils [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] Acquiring lock "3ee1023c-7837-4db0-88d4-f88c9a43fba3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.801053] env[61978]: DEBUG oslo_concurrency.lockutils [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] Lock "3ee1023c-7837-4db0-88d4-f88c9a43fba3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.801265] env[61978]: DEBUG oslo_concurrency.lockutils [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] Lock "3ee1023c-7837-4db0-88d4-f88c9a43fba3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.801459] env[61978]: DEBUG nova.compute.manager [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] No waiting events found dispatching network-vif-plugged-c0a88faf-13e7-4c53-bd83-b5d1060f8d5b {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1040.801682] env[61978]: WARNING nova.compute.manager [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Received unexpected event network-vif-plugged-c0a88faf-13e7-4c53-bd83-b5d1060f8d5b for instance with vm_state building and task_state spawning. [ 1040.801868] env[61978]: DEBUG nova.compute.manager [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Received event network-changed-c0a88faf-13e7-4c53-bd83-b5d1060f8d5b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1040.802072] env[61978]: DEBUG nova.compute.manager [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Refreshing instance network info cache due to event network-changed-c0a88faf-13e7-4c53-bd83-b5d1060f8d5b. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1040.802350] env[61978]: DEBUG oslo_concurrency.lockutils [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] Acquiring lock "refresh_cache-3ee1023c-7837-4db0-88d4-f88c9a43fba3" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1040.802545] env[61978]: DEBUG oslo_concurrency.lockutils [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] Acquired lock "refresh_cache-3ee1023c-7837-4db0-88d4-f88c9a43fba3" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.802767] env[61978]: DEBUG nova.network.neutron [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Refreshing network info cache for port c0a88faf-13e7-4c53-bd83-b5d1060f8d5b {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1040.803452] env[61978]: WARNING oslo_messaging._drivers.amqpdriver [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 1040.838600] env[61978]: DEBUG oslo_vmware.api [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395185, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211185} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.838877] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1040.839101] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1040.839299] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1040.839479] env[61978]: INFO nova.compute.manager [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1040.839726] env[61978]: DEBUG oslo.service.loopingcall [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1040.839940] env[61978]: DEBUG nova.compute.manager [-] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1040.840045] env[61978]: DEBUG nova.network.neutron [-] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1040.905162] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5254f62b-b149-567c-a352-d83717abe4d1, 'name': SearchDatastore_Task, 'duration_secs': 0.015515} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.905517] env[61978]: DEBUG oslo_concurrency.lockutils [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.905793] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 3ee1023c-7837-4db0-88d4-f88c9a43fba3/3ee1023c-7837-4db0-88d4-f88c9a43fba3.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1040.906124] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3565efbc-851c-45fe-afd6-bd8e2a5f7def {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.921033] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 1040.921033] env[61978]: value = "task-1395187" [ 1040.921033] env[61978]: _type = "Task" [ 1040.921033] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.935903] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395187, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.020470] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395186, 'name': CreateVM_Task, 'duration_secs': 0.515165} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.020757] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1041.023622] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1041.023622] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.023622] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1041.023622] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c578e5aa-eb58-481c-89ed-1b5539513b65 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.029736] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1041.029736] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f0d904-76ed-7599-7a98-71cdada140e2" [ 1041.029736] env[61978]: _type = "Task" [ 1041.029736] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.047018] env[61978]: INFO nova.compute.manager [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Took 43.05 seconds to build instance. [ 1041.054468] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f0d904-76ed-7599-7a98-71cdada140e2, 'name': SearchDatastore_Task, 'duration_secs': 0.013606} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.055079] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.055188] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1041.055451] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1041.055611] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.055855] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1041.056564] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c67add40-e75f-4c64-bba0-4db9bdbfdec2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.067895] env[61978]: DEBUG nova.network.neutron [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Updating instance_info_cache with network_info: [{"id": "fcd64700-31ef-4310-8986-b22e515b1c55", "address": "fa:16:3e:a7:22:5a", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd64700-31", "ovs_interfaceid": "fcd64700-31ef-4310-8986-b22e515b1c55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.072388] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1041.072388] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1041.072388] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e013bab-3fbc-451c-affb-552ccf868906 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.078543] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1041.078543] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f32e3e-d61b-6f76-0b8f-ca32e0358522" [ 1041.078543] env[61978]: _type = "Task" [ 1041.078543] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.089754] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f32e3e-d61b-6f76-0b8f-ca32e0358522, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.120195] env[61978]: DEBUG nova.network.neutron [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1041.206225] env[61978]: DEBUG nova.compute.manager [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1041.266748] env[61978]: DEBUG oslo_concurrency.lockutils [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.267097] env[61978]: DEBUG oslo_concurrency.lockutils [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.267365] env[61978]: DEBUG oslo_concurrency.lockutils [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.268052] env[61978]: DEBUG oslo_concurrency.lockutils [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.268052] env[61978]: DEBUG oslo_concurrency.lockutils [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.272107] env[61978]: DEBUG oslo_vmware.api [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395183, 'name': RemoveSnapshot_Task, 'duration_secs': 0.564886} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.272107] env[61978]: INFO nova.compute.manager [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Terminating instance [ 1041.272479] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Deleted Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1041.272664] env[61978]: INFO nova.compute.manager [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Took 14.91 seconds to snapshot the instance on the hypervisor. [ 1041.276901] env[61978]: DEBUG nova.compute.manager [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1041.277119] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1041.280027] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a38d4a-c13c-4ae2-8352-d0bdd15e398c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.302739] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1041.303415] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-af4537db-53ad-459a-948d-ed9ae6336508 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.321472] env[61978]: DEBUG oslo_vmware.api [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 1041.321472] env[61978]: value = "task-1395188" [ 1041.321472] env[61978]: _type = "Task" [ 1041.321472] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.337806] env[61978]: DEBUG oslo_vmware.api [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1395188, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.438898] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395187, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.500584] env[61978]: DEBUG nova.network.neutron [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Updating instance_info_cache with network_info: [{"id": "fdf95a42-1379-4895-9a94-f8a8cf1d070d", "address": "fa:16:3e:0f:c0:2a", "network": {"id": "3e60c5f0-d590-4a22-a8bf-c0356ac4deb4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1865077723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86f4ae0b29af4ee2b33e5a499cf1e899", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdf95a42-13", "ovs_interfaceid": "fdf95a42-1379-4895-9a94-f8a8cf1d070d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.556081] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e101b134-4288-4dfa-b8db-b3563162c932 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lock "5d9556d2-fcdd-416f-8f16-0fb271ff4ca5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.722s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.572676] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Releasing lock "refresh_cache-f930ab49-c215-4b2e-92b1-21c0d52a70eb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.591143] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f32e3e-d61b-6f76-0b8f-ca32e0358522, 'name': SearchDatastore_Task, 'duration_secs': 0.01317} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.592019] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d528fa9e-6264-4fcc-90fb-972d9e2e1e00 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.599214] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1041.599214] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]520840f2-4cce-0526-1bf1-ace7fd3b8353" [ 1041.599214] env[61978]: _type = "Task" [ 1041.599214] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.611133] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520840f2-4cce-0526-1bf1-ace7fd3b8353, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.697128] env[61978]: DEBUG nova.network.neutron [-] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.736913] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.736913] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.738706] env[61978]: INFO nova.compute.claims [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1041.798471] env[61978]: DEBUG nova.network.neutron [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Updated VIF entry in instance network info cache for port c0a88faf-13e7-4c53-bd83-b5d1060f8d5b. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1041.798824] env[61978]: DEBUG nova.network.neutron [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Updating instance_info_cache with network_info: [{"id": "c0a88faf-13e7-4c53-bd83-b5d1060f8d5b", "address": "fa:16:3e:40:fe:15", "network": {"id": "140282e9-127a-4f19-b6d1-6bea55474c67", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-904021562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a3471435d4747648cd8ddf0817d9b85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0a88faf-13", "ovs_interfaceid": "c0a88faf-13e7-4c53-bd83-b5d1060f8d5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.834173] env[61978]: DEBUG oslo_vmware.api [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1395188, 'name': PowerOffVM_Task, 'duration_secs': 0.253642} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.836380] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1041.836569] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1041.837301] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3177792b-d826-4d06-8c96-873739d4715e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.844417] env[61978]: DEBUG nova.compute.manager [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Found 3 images (rotation: 2) {{(pid=61978) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 1041.844622] env[61978]: DEBUG nova.compute.manager [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Rotating out 1 backups {{(pid=61978) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4562}} [ 1041.844872] env[61978]: DEBUG nova.compute.manager [None req-c08fe205-8cd8-493d-96c7-ed51bf2bf228 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Deleting image 81a4ea36-3cda-42da-b32e-eca1d059e24b {{(pid=61978) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4567}} [ 1041.923985] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1041.924298] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1041.925423] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Deleting the datastore file [datastore1] b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1041.929679] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5df04219-ed86-4d98-8432-3868a3d96fbe {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.942585] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395187, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.945043] env[61978]: DEBUG oslo_vmware.api [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for the task: (returnval){ [ 1041.945043] env[61978]: value = "task-1395190" [ 1041.945043] env[61978]: _type = "Task" [ 1041.945043] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.953649] env[61978]: DEBUG oslo_vmware.api [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1395190, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.957125] env[61978]: DEBUG nova.network.neutron [-] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.007864] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Releasing lock "refresh_cache-9ee04ee8-98ec-4be9-935d-cad7cd176466" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1042.007864] env[61978]: DEBUG nova.compute.manager [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Instance network_info: |[{"id": "fdf95a42-1379-4895-9a94-f8a8cf1d070d", "address": "fa:16:3e:0f:c0:2a", "network": {"id": "3e60c5f0-d590-4a22-a8bf-c0356ac4deb4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1865077723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86f4ae0b29af4ee2b33e5a499cf1e899", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdf95a42-13", "ovs_interfaceid": "fdf95a42-1379-4895-9a94-f8a8cf1d070d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1042.007864] env[61978]: DEBUG 
nova.virt.vmwareapi.vmops [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:c0:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fdf95a42-1379-4895-9a94-f8a8cf1d070d', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1042.015580] env[61978]: DEBUG oslo.service.loopingcall [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1042.015990] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1042.016198] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f3f4be1b-07bc-400a-9ef0-47d951f71fec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.042053] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1042.042053] env[61978]: value = "task-1395191" [ 1042.042053] env[61978]: _type = "Task" [ 1042.042053] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.054023] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395191, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.076523] env[61978]: DEBUG nova.compute.manager [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=61978) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:897}} [ 1042.076831] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.112886] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520840f2-4cce-0526-1bf1-ace7fd3b8353, 'name': SearchDatastore_Task, 'duration_secs': 0.01255} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.114250] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1042.114250] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 7e71c8de-1f94-4161-8ad8-a67792c5ce24/7e71c8de-1f94-4161-8ad8-a67792c5ce24.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1042.114250] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bfbdaa1b-dde5-4976-9c7f-03a953060d7d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.123278] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1042.123278] env[61978]: value = "task-1395192" [ 1042.123278] env[61978]: _type = "Task" [ 1042.123278] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.133800] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395192, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.201394] env[61978]: INFO nova.compute.manager [-] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Took 1.36 seconds to deallocate network for instance. 
[ 1042.295726] env[61978]: DEBUG nova.compute.manager [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1042.296729] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b1bdc3-35d2-42f2-8938-39a21f28ffd8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.301813] env[61978]: DEBUG oslo_concurrency.lockutils [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] Releasing lock "refresh_cache-3ee1023c-7837-4db0-88d4-f88c9a43fba3" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1042.302121] env[61978]: DEBUG nova.compute.manager [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Received event network-vif-deleted-a410cd2b-4149-421b-8f8e-287f5927da94 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1042.302323] env[61978]: DEBUG nova.compute.manager [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Received event network-vif-deleted-34e47e3b-49fc-4498-b258-cf27c276e3ac {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1042.302540] env[61978]: DEBUG nova.compute.manager [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Received event network-vif-plugged-fdf95a42-1379-4895-9a94-f8a8cf1d070d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1042.302737] env[61978]: DEBUG oslo_concurrency.lockutils [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] Acquiring lock "9ee04ee8-98ec-4be9-935d-cad7cd176466-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.302945] env[61978]: DEBUG oslo_concurrency.lockutils [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] Lock "9ee04ee8-98ec-4be9-935d-cad7cd176466-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.303208] env[61978]: DEBUG oslo_concurrency.lockutils [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] Lock "9ee04ee8-98ec-4be9-935d-cad7cd176466-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.303393] env[61978]: DEBUG nova.compute.manager [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] No waiting events found dispatching network-vif-plugged-fdf95a42-1379-4895-9a94-f8a8cf1d070d {{(pid=61978) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1042.303568] env[61978]: WARNING nova.compute.manager [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Received unexpected event network-vif-plugged-fdf95a42-1379-4895-9a94-f8a8cf1d070d for instance with vm_state building and task_state spawning. [ 1042.303740] env[61978]: DEBUG nova.compute.manager [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Received event network-changed-fdf95a42-1379-4895-9a94-f8a8cf1d070d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1042.303900] env[61978]: DEBUG nova.compute.manager [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Refreshing instance network info cache due to event network-changed-fdf95a42-1379-4895-9a94-f8a8cf1d070d. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1042.304130] env[61978]: DEBUG oslo_concurrency.lockutils [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] Acquiring lock "refresh_cache-9ee04ee8-98ec-4be9-935d-cad7cd176466" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1042.304306] env[61978]: DEBUG oslo_concurrency.lockutils [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] Acquired lock "refresh_cache-9ee04ee8-98ec-4be9-935d-cad7cd176466" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.304477] env[61978]: DEBUG nova.network.neutron [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Refreshing network info cache for port fdf95a42-1379-4895-9a94-f8a8cf1d070d {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1042.434868] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395187, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.455738] env[61978]: DEBUG oslo_vmware.api [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Task: {'id': task-1395190, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.284112} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.456509] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1042.456788] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1042.457046] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1042.457298] env[61978]: INFO nova.compute.manager [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1042.457795] env[61978]: DEBUG oslo.service.loopingcall [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1042.458081] env[61978]: DEBUG nova.compute.manager [-] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1042.458263] env[61978]: DEBUG nova.network.neutron [-] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1042.460612] env[61978]: INFO nova.compute.manager [-] [instance: 8f609401-af09-4291-a1e7-a356fbc4aac9] Took 1.71 seconds to deallocate network for instance. [ 1042.557704] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395191, 'name': CreateVM_Task, 'duration_secs': 0.485479} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.558040] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1042.558652] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1042.558991] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.559498] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1042.559626] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d94ef02f-3eb0-42d0-82b4-1fa0db168f6f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.565413] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1042.565413] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e2605e-5008-14a5-28a2-dec1fbf37204" [ 1042.565413] env[61978]: _type = "Task" [ 1042.565413] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.575918] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e2605e-5008-14a5-28a2-dec1fbf37204, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.586419] env[61978]: DEBUG oslo_concurrency.lockutils [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquiring lock "5d9556d2-fcdd-416f-8f16-0fb271ff4ca5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.586710] env[61978]: DEBUG oslo_concurrency.lockutils [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lock "5d9556d2-fcdd-416f-8f16-0fb271ff4ca5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.586933] env[61978]: DEBUG oslo_concurrency.lockutils [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquiring lock "5d9556d2-fcdd-416f-8f16-0fb271ff4ca5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.587139] env[61978]: DEBUG oslo_concurrency.lockutils [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lock "5d9556d2-fcdd-416f-8f16-0fb271ff4ca5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.587338] env[61978]: DEBUG oslo_concurrency.lockutils [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lock "5d9556d2-fcdd-416f-8f16-0fb271ff4ca5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.589962] env[61978]: INFO nova.compute.manager [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Terminating instance [ 1042.592148] env[61978]: DEBUG nova.compute.manager [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1042.592300] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1042.593262] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e54fcb-a2ae-46b4-b7bb-9ba68792d6dd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.603459] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1042.603804] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e3afec92-2fa2-447c-8334-f3173344f317 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.614925] env[61978]: DEBUG oslo_vmware.api [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for the task: (returnval){ [ 1042.614925] env[61978]: value = "task-1395193" [ 1042.614925] env[61978]: _type = "Task" [ 1042.614925] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.633691] env[61978]: DEBUG oslo_vmware.api [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395193, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.642091] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395192, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.710800] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.824755] env[61978]: INFO nova.compute.manager [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] instance snapshotting [ 1042.827853] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92abadde-51f6-4df5-85de-99b108183441 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.860955] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d5bebf-7484-4078-884e-956a3f5587bc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.944784] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395187, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.589057} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.945391] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 3ee1023c-7837-4db0-88d4-f88c9a43fba3/3ee1023c-7837-4db0-88d4-f88c9a43fba3.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1042.946015] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1042.946767] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-66601995-4595-4fe1-9a06-f0570463b146 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.961511] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 1042.961511] env[61978]: value = "task-1395194" [ 1042.961511] env[61978]: _type = "Task" [ 1042.961511] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.973875] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395194, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.079073] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e2605e-5008-14a5-28a2-dec1fbf37204, 'name': SearchDatastore_Task, 'duration_secs': 0.013143} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.079465] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1043.079691] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1043.079936] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1043.080216] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.082113] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1043.082113] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43fb2429-4c71-4229-ae6d-909a035c8e4b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.095492] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Created 
directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1043.095492] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1043.095681] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-799ae477-8052-4770-a25e-08dfcb79f055 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.102805] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1043.102805] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]524265b2-0705-d56d-a00d-c230ed6597df" [ 1043.102805] env[61978]: _type = "Task" [ 1043.102805] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.118071] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]524265b2-0705-d56d-a00d-c230ed6597df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.133548] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395192, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.900082} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.133869] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 7e71c8de-1f94-4161-8ad8-a67792c5ce24/7e71c8de-1f94-4161-8ad8-a67792c5ce24.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1043.134139] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1043.134456] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a436e0cf-f952-4328-bae3-090d81bb42e0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.141526] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1043.141526] env[61978]: value = "task-1395195" [ 1043.141526] env[61978]: _type = "Task" [ 1043.141526] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.151446] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395195, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.168558] env[61978]: DEBUG oslo_vmware.api [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395193, 'name': PowerOffVM_Task, 'duration_secs': 0.287724} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.168849] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1043.169030] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1043.169326] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8621a1b5-ea72-4202-ad3e-a2cdc3325232 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.380144] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Creating Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1043.380837] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5dbbca5b-0110-481e-bec0-f6b63ce90fac {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.391932] env[61978]: DEBUG oslo_vmware.api [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 1043.391932] env[61978]: value = "task-1395197" [ 1043.391932] env[61978]: _type = "Task" [ 1043.391932] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.408550] env[61978]: DEBUG oslo_vmware.api [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395197, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.416054] env[61978]: DEBUG nova.compute.manager [req-5da6ea35-3509-482d-a5d3-3bcaeb22bffb req-111ac327-2b40-49fc-8719-ac5d1920c481 service nova] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Received event network-vif-deleted-66a47730-43bc-4ac1-b494-0ec1041be9d2 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1043.471573] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395194, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09706} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.474982] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1043.476830] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9bcbfc9-4a1a-4deb-b6e3-20609dac621c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.512154] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] 3ee1023c-7837-4db0-88d4-f88c9a43fba3/3ee1023c-7837-4db0-88d4-f88c9a43fba3.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1043.517854] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b0ff36e-da32-4375-ad56-d525e50bb9af {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.535373] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8a5298c-9df8-4635-8cd8-d5f02f127389 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "8f609401-af09-4291-a1e7-a356fbc4aac9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.894s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.550898] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "f3a9f204-e4ed-49f1-85ef-8cea7377cf89" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.551192] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "f3a9f204-e4ed-49f1-85ef-8cea7377cf89" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.552496] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 1043.552496] env[61978]: value = "task-1395198" [ 1043.552496] env[61978]: _type = "Task" [ 1043.552496] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.570890] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395198, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.579070] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd34b98-cfff-4e97-9e4c-87d559f8ee7a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.591774] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "7e6178cf-b7be-46f8-8f8c-8605a09703c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.592093] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "7e6178cf-b7be-46f8-8f8c-8605a09703c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.595057] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d379bac-4123-4ddb-b44b-63bc3be9df47 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.634914] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408d57bc-58af-4037-b7f5-68e275b1e5c2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.654133] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]524265b2-0705-d56d-a00d-c230ed6597df, 'name': SearchDatastore_Task, 'duration_secs': 0.015996} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.654133] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7bb2cfd-99ab-4244-8b5b-00c93e93fb99 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.661511] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a12d156b-4af0-430e-8106-70f21a4a863a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.664072] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395195, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.218776} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.664799] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1043.665987] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90acbac2-dbb5-4c59-8e80-1c9511f94230 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.675297] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1043.675297] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]528f10f0-724b-b786-ed54-a915040a2a13" [ 1043.675297] env[61978]: _type = "Task" [ 1043.675297] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.703323] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] 7e71c8de-1f94-4161-8ad8-a67792c5ce24/7e71c8de-1f94-4161-8ad8-a67792c5ce24.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1043.704354] env[61978]: DEBUG nova.compute.provider_tree [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1043.709217] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17e2beae-0842-491b-b00a-70f56c24962b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.726019] env[61978]: DEBUG nova.scheduler.client.report [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1043.738194] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528f10f0-724b-b786-ed54-a915040a2a13, 'name': SearchDatastore_Task, 'duration_secs': 0.013339} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.739655] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1043.739937] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 9ee04ee8-98ec-4be9-935d-cad7cd176466/9ee04ee8-98ec-4be9-935d-cad7cd176466.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1043.740330] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1043.740330] env[61978]: value = "task-1395199" [ 1043.740330] env[61978]: _type = "Task" [ 1043.740330] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.740544] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4aa5446f-c172-403a-9fb2-15efead4e454 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.753636] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395199, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.755170] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1043.755170] env[61978]: value = "task-1395200" [ 1043.755170] env[61978]: _type = "Task" [ 1043.755170] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.766233] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395200, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.793751] env[61978]: DEBUG nova.network.neutron [-] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.886405] env[61978]: DEBUG nova.network.neutron [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Updated VIF entry in instance network info cache for port fdf95a42-1379-4895-9a94-f8a8cf1d070d. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1043.886405] env[61978]: DEBUG nova.network.neutron [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Updating instance_info_cache with network_info: [{"id": "fdf95a42-1379-4895-9a94-f8a8cf1d070d", "address": "fa:16:3e:0f:c0:2a", "network": {"id": "3e60c5f0-d590-4a22-a8bf-c0356ac4deb4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1865077723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86f4ae0b29af4ee2b33e5a499cf1e899", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdf95a42-13", "ovs_interfaceid": "fdf95a42-1379-4895-9a94-f8a8cf1d070d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.914096] env[61978]: DEBUG oslo_vmware.api [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395197, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.060056] env[61978]: DEBUG nova.compute.manager [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1044.080123] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395198, 'name': ReconfigVM_Task, 'duration_secs': 0.35009} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.080123] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Reconfigured VM instance instance-00000038 to attach disk [datastore2] 3ee1023c-7837-4db0-88d4-f88c9a43fba3/3ee1023c-7837-4db0-88d4-f88c9a43fba3.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1044.080325] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa76ab97-4e57-492f-b2d2-4d402c788194 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.092320] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 1044.092320] env[61978]: value = "task-1395201" [ 1044.092320] env[61978]: _type = "Task" [ 1044.092320] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.094829] env[61978]: DEBUG nova.compute.manager [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1044.105623] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395201, 'name': Rename_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.231187] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.232402] env[61978]: DEBUG nova.compute.manager [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1044.236541] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 2.159s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.254744] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395199, 'name': ReconfigVM_Task, 'duration_secs': 0.315653} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.256386] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Reconfigured VM instance instance-00000034 to attach disk [datastore2] 7e71c8de-1f94-4161-8ad8-a67792c5ce24/7e71c8de-1f94-4161-8ad8-a67792c5ce24.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1044.256386] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-60b9ab8f-7c1d-40e4-8c56-aea7a14a80db {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.268908] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395200, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.270468] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1044.270468] env[61978]: value = "task-1395202" [ 1044.270468] env[61978]: _type = "Task" [ 1044.270468] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.280340] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395202, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.296031] env[61978]: INFO nova.compute.manager [-] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Took 1.84 seconds to deallocate network for instance. 
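The ReconfigVM_Task and Rename_Task entries above follow the invoke-then-poll pattern that recurs throughout this log: a vSphere task is started through the oslo.vmware session, and wait_for_task() polls it until it completes, producing the "progress is N%" and "completed successfully" DEBUG lines. The following is only a minimal sketch of that pattern; the vCenter host, credentials, managed-object reference value and new VM name are placeholders, not values taken from this deployment.

```python
# Sketch of the invoke-then-poll task pattern visible in the surrounding log.
# All connection details and the moref value below are illustrative only.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Open an API session (compare the _create_session lines at the start of this
# log); host, user and password here are placeholders.
session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'administrator', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Build a managed-object reference for the VM to operate on; 'vm-12345' is a
# placeholder, not an ID from this run.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Start a server-side task (here Rename_Task, as in the log) and block until
# wait_for_task() reports completion; wait_for_task() is the caller behind the
# _poll_task() progress messages logged above.
task = session.invoke_api(session.vim, 'Rename_Task', vm_ref,
                          newName='renamed-instance')
session.wait_for_task(task)
```

The same invoke/wait shape underlies the CopyVirtualDisk_Task, ExtendVirtualDisk_Task and PowerOnVM_Task entries elsewhere in this section.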
[ 1044.388889] env[61978]: DEBUG oslo_concurrency.lockutils [req-bfb770f5-14bc-4d9a-bfcb-061405c320e4 req-b6cc872d-013f-49c6-8f0e-c052c6cb7603 service nova] Releasing lock "refresh_cache-9ee04ee8-98ec-4be9-935d-cad7cd176466" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.408403] env[61978]: DEBUG oslo_vmware.api [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395197, 'name': CreateSnapshot_Task, 'duration_secs': 0.803483} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.409380] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Created Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1044.410633] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6b0716-9a38-441c-823d-cdfea5f39795 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.463428] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5cf5ae6b-921e-42b3-af74-e21cbdaaa0f7 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.464246] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5cf5ae6b-921e-42b3-af74-e21cbdaaa0f7 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.592395] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.609937] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395201, 'name': Rename_Task, 'duration_secs': 0.268647} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.610315] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1044.610971] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6208a096-fd79-47f3-b29b-bdbaebe88b0d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.616201] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.619821] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 1044.619821] env[61978]: value = "task-1395203" [ 1044.619821] env[61978]: _type = "Task" [ 1044.619821] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.629742] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395203, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.740643] env[61978]: DEBUG nova.compute.utils [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1044.742339] env[61978]: DEBUG nova.compute.manager [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1044.742552] env[61978]: DEBUG nova.network.neutron [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1044.746251] env[61978]: DEBUG nova.objects.instance [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lazy-loading 'migration_context' on Instance uuid f930ab49-c215-4b2e-92b1-21c0d52a70eb {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1044.769228] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395200, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533208} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.769406] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 9ee04ee8-98ec-4be9-935d-cad7cd176466/9ee04ee8-98ec-4be9-935d-cad7cd176466.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1044.770413] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1044.770413] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4150c8cd-812d-4691-b7cb-bf3b064b77df {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.782647] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395202, 'name': Rename_Task, 'duration_secs': 0.20682} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.784373] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1044.784940] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1044.784940] env[61978]: value = "task-1395204" [ 1044.784940] env[61978]: _type = "Task" [ 1044.784940] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.784940] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1eea58c3-af6e-41a8-95fd-7191e261a9ac {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.798409] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395204, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.800384] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1044.800384] env[61978]: value = "task-1395205" [ 1044.800384] env[61978]: _type = "Task" [ 1044.800384] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.805242] env[61978]: DEBUG oslo_concurrency.lockutils [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.811684] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395205, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.822026] env[61978]: DEBUG nova.policy [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '72c335696cee4638967757e4f4cdfe59', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3c3fe7c7f560427db0f814a2c67bb527', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1044.936288] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Creating linked-clone VM from snapshot {{(pid=61978) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1044.936727] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-44223df5-9044-4f34-934c-f72e78bd734b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.957232] env[61978]: DEBUG oslo_vmware.api [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 1044.957232] env[61978]: value = "task-1395206" [ 1044.957232] env[61978]: _type = "Task" [ 1044.957232] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.970238] env[61978]: INFO nova.compute.manager [None req-5cf5ae6b-921e-42b3-af74-e21cbdaaa0f7 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Detaching volume 229fb92f-4196-4c02-99cd-5cfa5c710c35 [ 1044.973192] env[61978]: DEBUG oslo_vmware.api [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395206, 'name': CloneVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.017095] env[61978]: INFO nova.virt.block_device [None req-5cf5ae6b-921e-42b3-af74-e21cbdaaa0f7 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Attempting to driver detach volume 229fb92f-4196-4c02-99cd-5cfa5c710c35 from mountpoint /dev/sdb [ 1045.017244] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cf5ae6b-921e-42b3-af74-e21cbdaaa0f7 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Volume detach. 
Driver type: vmdk {{(pid=61978) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1045.017585] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cf5ae6b-921e-42b3-af74-e21cbdaaa0f7 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295891', 'volume_id': '229fb92f-4196-4c02-99cd-5cfa5c710c35', 'name': 'volume-229fb92f-4196-4c02-99cd-5cfa5c710c35', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f3c837fb-be7e-40a6-aae4-7f213c62ab2c', 'attached_at': '', 'detached_at': '', 'volume_id': '229fb92f-4196-4c02-99cd-5cfa5c710c35', 'serial': '229fb92f-4196-4c02-99cd-5cfa5c710c35'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1045.021852] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f2289d6-1ffe-4a2e-9ac9-8c11a818b09b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.056507] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49831a31-00d5-4206-b7c7-dc12dbf29bc4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.066249] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b24f7dde-c40a-48a7-a112-5e92c85ce258 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.098058] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de01817-4a3c-4a4b-b643-c395f94abc45 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.116682] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cf5ae6b-921e-42b3-af74-e21cbdaaa0f7 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] The volume has not been displaced from its original location: [datastore2] volume-229fb92f-4196-4c02-99cd-5cfa5c710c35/volume-229fb92f-4196-4c02-99cd-5cfa5c710c35.vmdk. No consolidation needed. {{(pid=61978) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1045.122595] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cf5ae6b-921e-42b3-af74-e21cbdaaa0f7 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Reconfiguring VM instance instance-0000001f to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1045.123592] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ccfd3912-b108-44a9-87aa-d7f4ad8eaa07 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.158265] env[61978]: DEBUG oslo_vmware.api [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395203, 'name': PowerOnVM_Task, 'duration_secs': 0.523168} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.160443] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1045.160583] env[61978]: INFO nova.compute.manager [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Took 12.76 seconds to spawn the instance on the hypervisor. [ 1045.160770] env[61978]: DEBUG nova.compute.manager [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1045.161175] env[61978]: DEBUG oslo_vmware.api [None req-5cf5ae6b-921e-42b3-af74-e21cbdaaa0f7 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1045.161175] env[61978]: value = "task-1395207" [ 1045.161175] env[61978]: _type = "Task" [ 1045.161175] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.163879] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-288017d7-1fe2-4b47-bdf9-bec36d50af2e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.181890] env[61978]: DEBUG oslo_vmware.api [None req-5cf5ae6b-921e-42b3-af74-e21cbdaaa0f7 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395207, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.233858] env[61978]: DEBUG nova.network.neutron [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Successfully created port: 3b5e4ed3-f9fb-4eed-b851-213b746751b7 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1045.248209] env[61978]: DEBUG nova.compute.manager [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1045.304809] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395204, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079932} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.308531] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1045.311765] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6c2f48-bfb0-476a-91c6-4d54548a78f9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.336305] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395205, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.344979] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] 9ee04ee8-98ec-4be9-935d-cad7cd176466/9ee04ee8-98ec-4be9-935d-cad7cd176466.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1045.348644] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7433d6a-5077-4207-913e-cfcfb92cd54e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.376122] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1045.376122] env[61978]: value = "task-1395208" [ 1045.376122] env[61978]: _type = "Task" [ 1045.376122] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.394233] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395208, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.477645] env[61978]: DEBUG oslo_vmware.api [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395206, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.680064] env[61978]: DEBUG oslo_vmware.api [None req-5cf5ae6b-921e-42b3-af74-e21cbdaaa0f7 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395207, 'name': ReconfigVM_Task, 'duration_secs': 0.513182} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.680492] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cf5ae6b-921e-42b3-af74-e21cbdaaa0f7 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Reconfigured VM instance instance-0000001f to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1045.693030] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c0e820f-a2f2-4368-a233-da709e909025 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.711481] env[61978]: INFO nova.compute.manager [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Took 38.44 seconds to build instance. [ 1045.729617] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1045.729866] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1045.730096] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Deleting the datastore file [datastore1] 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1045.732068] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b0d8ff64-7c0e-4e4c-8d7c-28f443dc0545 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.733837] env[61978]: DEBUG oslo_vmware.api [None req-5cf5ae6b-921e-42b3-af74-e21cbdaaa0f7 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1045.733837] env[61978]: value = "task-1395209" [ 1045.733837] env[61978]: _type = "Task" [ 1045.733837] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.749215] env[61978]: DEBUG oslo_vmware.api [None req-5cf5ae6b-921e-42b3-af74-e21cbdaaa0f7 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395209, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.749215] env[61978]: DEBUG oslo_vmware.api [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for the task: (returnval){ [ 1045.749215] env[61978]: value = "task-1395210" [ 1045.749215] env[61978]: _type = "Task" [ 1045.749215] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.767722] env[61978]: DEBUG oslo_vmware.api [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395210, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.828243] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395205, 'name': PowerOnVM_Task, 'duration_secs': 0.60944} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.828243] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1045.828547] env[61978]: DEBUG nova.compute.manager [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1045.833928] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb05112-57de-4ad4-9f0e-7608ced6c59c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.857368] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd9de2ba-7871-457f-b0c4-91167a15cffe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.869141] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6998cd56-a5d8-42b3-840c-f43b17ae157e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.913411] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8203a3-b9bf-4644-9b78-61ba176ee9e4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.927635] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e65c8f-da4b-42c1-8a65-90de8e6987bf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.932406] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 
tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395208, 'name': ReconfigVM_Task, 'duration_secs': 0.525811} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.932629] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Reconfigured VM instance instance-0000003a to attach disk [datastore2] 9ee04ee8-98ec-4be9-935d-cad7cd176466/9ee04ee8-98ec-4be9-935d-cad7cd176466.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1045.933713] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-031e38b1-6cf9-49d7-8126-ac0e06b03450 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.945767] env[61978]: DEBUG nova.compute.provider_tree [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1045.950906] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1045.950906] env[61978]: value = "task-1395211" [ 1045.950906] env[61978]: _type = "Task" [ 1045.950906] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.958348] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395211, 'name': Rename_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.970022] env[61978]: DEBUG oslo_vmware.api [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395206, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.975863] env[61978]: DEBUG oslo_concurrency.lockutils [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "a0ea73d1-a613-4403-8527-a8b81a619adf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.976329] env[61978]: DEBUG oslo_concurrency.lockutils [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "a0ea73d1-a613-4403-8527-a8b81a619adf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.030151] env[61978]: DEBUG nova.compute.manager [req-7cd8d1e7-8703-419f-a695-c0405b4b5c57 req-a28737bb-79f5-48ab-a2d1-d9a357ee4b5c service nova] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Received event network-vif-deleted-5ad44f35-6aec-4586-a2e9-9f486fa4fd57 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1046.217726] env[61978]: DEBUG oslo_concurrency.lockutils [None req-622aeffc-eec8-423e-9cdc-8531ad64a7c8 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "3ee1023c-7837-4db0-88d4-f88c9a43fba3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.961s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.254480] env[61978]: DEBUG oslo_vmware.api [None req-5cf5ae6b-921e-42b3-af74-e21cbdaaa0f7 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395209, 'name': ReconfigVM_Task, 'duration_secs': 0.203092} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.258752] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cf5ae6b-921e-42b3-af74-e21cbdaaa0f7 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295891', 'volume_id': '229fb92f-4196-4c02-99cd-5cfa5c710c35', 'name': 'volume-229fb92f-4196-4c02-99cd-5cfa5c710c35', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f3c837fb-be7e-40a6-aae4-7f213c62ab2c', 'attached_at': '', 'detached_at': '', 'volume_id': '229fb92f-4196-4c02-99cd-5cfa5c710c35', 'serial': '229fb92f-4196-4c02-99cd-5cfa5c710c35'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1046.263670] env[61978]: DEBUG nova.compute.manager [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1046.275059] env[61978]: DEBUG oslo_vmware.api [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395210, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.284894} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.275409] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1046.275612] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1046.275831] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1046.276044] env[61978]: INFO nova.compute.manager [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Took 3.68 seconds to destroy the instance on the hypervisor. [ 1046.276584] env[61978]: DEBUG oslo.service.loopingcall [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1046.276863] env[61978]: DEBUG nova.compute.manager [-] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1046.276965] env[61978]: DEBUG nova.network.neutron [-] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1046.290492] env[61978]: DEBUG nova.virt.hardware [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1046.290792] env[61978]: DEBUG nova.virt.hardware [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1046.292000] env[61978]: DEBUG nova.virt.hardware [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1046.292389] env[61978]: DEBUG nova.virt.hardware [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1046.292706] env[61978]: DEBUG nova.virt.hardware [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1046.293178] env[61978]: DEBUG nova.virt.hardware [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1046.293631] env[61978]: DEBUG nova.virt.hardware [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1046.294062] env[61978]: DEBUG nova.virt.hardware [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1046.294346] env[61978]: DEBUG nova.virt.hardware [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1046.294705] env[61978]: DEBUG nova.virt.hardware [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1046.294991] env[61978]: DEBUG nova.virt.hardware [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1046.296799] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0265b22-7f01-4a3c-9b5b-cf37617438bf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.313249] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b8811c-cab0-487c-9e5c-2537fda00a7e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.352927] env[61978]: INFO nova.compute.manager [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] bringing vm to original state: 'stopped' [ 1046.451207] env[61978]: DEBUG nova.scheduler.client.report [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1046.474200] env[61978]: DEBUG oslo_vmware.api [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395206, 'name': CloneVM_Task, 'duration_secs': 1.505712} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.474545] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395211, 'name': Rename_Task, 'duration_secs': 0.226311} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.475462] env[61978]: INFO nova.virt.vmwareapi.vmops [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Created linked-clone VM from snapshot [ 1046.475794] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1046.476654] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9a2fda-6df3-4f27-8595-e6b32540296f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.480051] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f5126d02-21d2-4b67-920c-8a556a3c143a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.482359] env[61978]: DEBUG nova.compute.manager [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1046.493550] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Uploading image 6d954726-4613-4115-bc63-9b69f59b17f3 {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1046.496950] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1046.496950] env[61978]: value = "task-1395212" [ 1046.496950] env[61978]: _type = "Task" [ 1046.496950] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.507300] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395212, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.520143] env[61978]: DEBUG oslo_vmware.rw_handles [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52299151-28a8-4440-ab0f-9e7c88999c5d/disk-0.vmdk. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1046.520765] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eae7d90-225f-4ffe-a999-06d7ae22476b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.528752] env[61978]: DEBUG oslo_vmware.rw_handles [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52299151-28a8-4440-ab0f-9e7c88999c5d/disk-0.vmdk is in state: ready. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1046.528981] env[61978]: ERROR oslo_vmware.rw_handles [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52299151-28a8-4440-ab0f-9e7c88999c5d/disk-0.vmdk due to incomplete transfer. [ 1046.529205] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b6f40266-bfe9-4d10-a50c-26eac5fb82ca {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.533426] env[61978]: DEBUG oslo_vmware.rw_handles [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1046.533426] env[61978]: value = "vm-295930" [ 1046.533426] env[61978]: _type = "VirtualMachine" [ 1046.533426] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1046.534253] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-613aecd7-24ef-4e05-8aea-45213d1db2b3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.542742] env[61978]: DEBUG oslo_vmware.rw_handles [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lease: (returnval){ [ 1046.542742] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5238c194-1765-702d-fb45-6384b614fad7" [ 1046.542742] env[61978]: _type = "HttpNfcLease" [ 1046.542742] env[61978]: } obtained for exporting VM: (result){ [ 1046.542742] env[61978]: value = "vm-295930" [ 1046.542742] env[61978]: _type = "VirtualMachine" [ 1046.542742] env[61978]: }. 
{{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1046.543140] env[61978]: DEBUG oslo_vmware.api [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the lease: (returnval){ [ 1046.543140] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5238c194-1765-702d-fb45-6384b614fad7" [ 1046.543140] env[61978]: _type = "HttpNfcLease" [ 1046.543140] env[61978]: } to be ready. {{(pid=61978) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1046.548858] env[61978]: DEBUG oslo_vmware.rw_handles [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52299151-28a8-4440-ab0f-9e7c88999c5d/disk-0.vmdk. {{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1046.548858] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Uploaded image 48818170-fdfe-4bfb-b961-d9a9ec32e870 to the Glance image server {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1046.550368] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Destroying the VM {{(pid=61978) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1046.553792] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-62d736fe-68e2-4f06-acbe-c2e7912d36c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.555923] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1046.555923] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5238c194-1765-702d-fb45-6384b614fad7" [ 1046.555923] env[61978]: _type = "HttpNfcLease" [ 1046.555923] env[61978]: } is ready. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1046.556515] env[61978]: DEBUG oslo_vmware.rw_handles [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1046.556515] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5238c194-1765-702d-fb45-6384b614fad7" [ 1046.556515] env[61978]: _type = "HttpNfcLease" [ 1046.556515] env[61978]: }. 
{{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1046.557244] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5decee84-804e-48fe-85bd-e38dc95ec2ef {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.561511] env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1046.561511] env[61978]: value = "task-1395214" [ 1046.561511] env[61978]: _type = "Task" [ 1046.561511] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.568648] env[61978]: DEBUG oslo_vmware.rw_handles [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5285565d-efa8-ae52-2c7b-63dc87a6c8a0/disk-0.vmdk from lease info. {{(pid=61978) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1046.568897] env[61978]: DEBUG oslo_vmware.rw_handles [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5285565d-efa8-ae52-2c7b-63dc87a6c8a0/disk-0.vmdk for reading. {{(pid=61978) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1046.633915] env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395214, 'name': Destroy_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.666960] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f41c91e0-9eab-4dbd-851e-541248b49e62 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.819819] env[61978]: DEBUG nova.objects.instance [None req-5cf5ae6b-921e-42b3-af74-e21cbdaaa0f7 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lazy-loading 'flavor' on Instance uuid f3c837fb-be7e-40a6-aae4-7f213c62ab2c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1046.864360] env[61978]: DEBUG nova.network.neutron [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Successfully updated port: 3b5e4ed3-f9fb-4eed-b851-213b746751b7 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1046.981952] env[61978]: DEBUG nova.compute.manager [req-6a9f5cc2-9e9b-4358-87d9-bf0b8184c992 req-8b16a54e-ad95-4c3f-a0f2-a8d301e9e6e1 service nova] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Received event network-vif-plugged-3b5e4ed3-f9fb-4eed-b851-213b746751b7 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1046.981952] env[61978]: DEBUG oslo_concurrency.lockutils [req-6a9f5cc2-9e9b-4358-87d9-bf0b8184c992 req-8b16a54e-ad95-4c3f-a0f2-a8d301e9e6e1 service nova] Acquiring lock "0cdff646-34ad-49d5-b775-28e8e7ce778e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.981952] env[61978]: DEBUG oslo_concurrency.lockutils [req-6a9f5cc2-9e9b-4358-87d9-bf0b8184c992 req-8b16a54e-ad95-4c3f-a0f2-a8d301e9e6e1 service nova] Lock "0cdff646-34ad-49d5-b775-28e8e7ce778e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.981952] env[61978]: DEBUG oslo_concurrency.lockutils [req-6a9f5cc2-9e9b-4358-87d9-bf0b8184c992 req-8b16a54e-ad95-4c3f-a0f2-a8d301e9e6e1 service nova] Lock "0cdff646-34ad-49d5-b775-28e8e7ce778e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.982979] env[61978]: DEBUG nova.compute.manager [req-6a9f5cc2-9e9b-4358-87d9-bf0b8184c992 req-8b16a54e-ad95-4c3f-a0f2-a8d301e9e6e1 service nova] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] No waiting events found dispatching network-vif-plugged-3b5e4ed3-f9fb-4eed-b851-213b746751b7 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1046.982979] env[61978]: WARNING nova.compute.manager [req-6a9f5cc2-9e9b-4358-87d9-bf0b8184c992 req-8b16a54e-ad95-4c3f-a0f2-a8d301e9e6e1 service nova] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Received unexpected event network-vif-plugged-3b5e4ed3-f9fb-4eed-b851-213b746751b7 for instance with vm_state building and task_state spawning. 
[ 1047.011451] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395212, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.026834] env[61978]: DEBUG oslo_concurrency.lockutils [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.051096] env[61978]: DEBUG oslo_concurrency.lockutils [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquiring lock "b932d221-aca9-4853-aa9c-2d27981e878c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.051353] env[61978]: DEBUG oslo_concurrency.lockutils [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "b932d221-aca9-4853-aa9c-2d27981e878c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.073078] env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395214, 'name': Destroy_Task, 'duration_secs': 0.408763} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.073425] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Destroyed the VM [ 1047.073678] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Deleting Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1047.074049] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-37c0b1d8-647c-4dfa-8f17-2e7d079140a8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.082620] env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1047.082620] env[61978]: value = "task-1395215" [ 1047.082620] env[61978]: _type = "Task" [ 1047.082620] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.094293] env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395215, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.366485] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "7e71c8de-1f94-4161-8ad8-a67792c5ce24" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.366974] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "7e71c8de-1f94-4161-8ad8-a67792c5ce24" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.002s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.367233] env[61978]: DEBUG nova.compute.manager [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1047.368654] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace6021f-028b-4842-b856-ad0941e37cee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.372919] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquiring lock "refresh_cache-0cdff646-34ad-49d5-b775-28e8e7ce778e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1047.373318] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquired lock "refresh_cache-0cdff646-34ad-49d5-b775-28e8e7ce778e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.373318] env[61978]: DEBUG nova.network.neutron [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1047.383963] env[61978]: DEBUG nova.compute.manager [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61978) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 
1047.387826] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1047.388123] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-35cca554-88a9-4f32-b7f8-530952d9c619 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.398579] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1047.398579] env[61978]: value = "task-1395216" [ 1047.398579] env[61978]: _type = "Task" [ 1047.398579] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.410802] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395216, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.470043] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.234s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.475835] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.765s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.476157] env[61978]: DEBUG nova.objects.instance [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lazy-loading 'resources' on Instance uuid 50788030-4dc2-4215-bf2c-acba5dd33ce4 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1047.513488] env[61978]: DEBUG oslo_vmware.api [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395212, 'name': PowerOnVM_Task, 'duration_secs': 0.8289} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.513918] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1047.514259] env[61978]: INFO nova.compute.manager [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Took 8.55 seconds to spawn the instance on the hypervisor. [ 1047.514470] env[61978]: DEBUG nova.compute.manager [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1047.515716] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ed43cd-8f14-414a-bfa6-f6bf4fb9497b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.554306] env[61978]: DEBUG nova.compute.manager [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1047.598319] env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395215, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.603146] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5f3ac966-b749-4c0c-9ee7-b042e4005453 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "c17c986e-c008-4414-8dd1-4ea836458048" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.603598] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5f3ac966-b749-4c0c-9ee7-b042e4005453 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "c17c986e-c008-4414-8dd1-4ea836458048" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.603865] env[61978]: DEBUG nova.compute.manager [None req-5f3ac966-b749-4c0c-9ee7-b042e4005453 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1047.605192] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c035a057-c49d-4267-bd87-939ca8106b11 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.613905] env[61978]: DEBUG nova.compute.manager [None req-5f3ac966-b749-4c0c-9ee7-b042e4005453 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61978) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1047.615426] env[61978]: DEBUG nova.objects.instance [None req-5f3ac966-b749-4c0c-9ee7-b042e4005453 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lazy-loading 'flavor' on Instance uuid c17c986e-c008-4414-8dd1-4ea836458048 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1047.678426] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquiring lock "f4034944-3a9d-4e14-a545-0bf574465e0b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.678703] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "f4034944-3a9d-4e14-a545-0bf574465e0b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.837392] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5cf5ae6b-921e-42b3-af74-e21cbdaaa0f7 tempest-AttachVolumeTestJSON-1446053004 
tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.373s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.912774] env[61978]: DEBUG oslo_vmware.api [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395216, 'name': PowerOffVM_Task, 'duration_secs': 0.322618} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.912774] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1047.912774] env[61978]: DEBUG nova.compute.manager [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1047.912774] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f9bdd8-7118-4001-9831-c8755e37a94e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.941175] env[61978]: DEBUG nova.network.neutron [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1048.048927] env[61978]: INFO nova.compute.manager [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Took 27.18 seconds to build instance. [ 1048.083678] env[61978]: DEBUG oslo_concurrency.lockutils [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.105794] env[61978]: DEBUG oslo_vmware.api [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395215, 'name': RemoveSnapshot_Task, 'duration_secs': 0.837165} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.108820] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Deleted Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1048.109214] env[61978]: INFO nova.compute.manager [None req-521a0b42-5df0-4b0b-a235-8658eb537e7d tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Took 19.14 seconds to snapshot the instance on the hypervisor. [ 1048.122306] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f3ac966-b749-4c0c-9ee7-b042e4005453 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1048.122719] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-da4a3bdd-28e7-45a1-a105-778fd31fc34a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.132428] env[61978]: DEBUG oslo_vmware.api [None req-5f3ac966-b749-4c0c-9ee7-b042e4005453 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1048.132428] env[61978]: value = "task-1395217" [ 1048.132428] env[61978]: _type = "Task" [ 1048.132428] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.152358] env[61978]: DEBUG oslo_concurrency.lockutils [None req-539795c4-92d6-43d7-aba2-c24bb289e4c4 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.152358] env[61978]: DEBUG oslo_concurrency.lockutils [None req-539795c4-92d6-43d7-aba2-c24bb289e4c4 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.152358] env[61978]: DEBUG nova.compute.manager [None req-539795c4-92d6-43d7-aba2-c24bb289e4c4 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1048.152358] env[61978]: DEBUG oslo_vmware.api [None req-5f3ac966-b749-4c0c-9ee7-b042e4005453 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395217, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.153666] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa15bbb-ed2d-486d-9c56-2464c95a2304 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.164651] env[61978]: DEBUG nova.compute.manager [None req-539795c4-92d6-43d7-aba2-c24bb289e4c4 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61978) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1048.164651] env[61978]: DEBUG nova.objects.instance [None req-539795c4-92d6-43d7-aba2-c24bb289e4c4 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lazy-loading 'flavor' on Instance uuid f3c837fb-be7e-40a6-aae4-7f213c62ab2c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1048.173237] env[61978]: DEBUG nova.network.neutron [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Updating instance_info_cache with network_info: [{"id": "3b5e4ed3-f9fb-4eed-b851-213b746751b7", "address": "fa:16:3e:11:2f:af", "network": {"id": "a4bfbe6c-0ede-463d-935f-52cc2f1e3692", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1615190107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3c3fe7c7f560427db0f814a2c67bb527", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5215e5b-294b-4e8c-bd06-355e9955ab1d", "external-id": "nsx-vlan-transportzone-529", "segmentation_id": 529, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b5e4ed3-f9", "ovs_interfaceid": "3b5e4ed3-f9fb-4eed-b851-213b746751b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.184019] env[61978]: DEBUG nova.compute.manager [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1048.385745] env[61978]: DEBUG nova.compute.manager [req-99f119a6-10a3-41ab-865c-c41f4cbe2bda req-023d1b14-fcbe-4413-9fad-95c6b38a573a service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Received event network-vif-deleted-0ac08d0f-ea95-4b4e-9fad-6e6e819ec94d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1048.385745] env[61978]: INFO nova.compute.manager [req-99f119a6-10a3-41ab-865c-c41f4cbe2bda req-023d1b14-fcbe-4413-9fad-95c6b38a573a service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Neutron deleted interface 0ac08d0f-ea95-4b4e-9fad-6e6e819ec94d; detaching it from the instance and deleting it from the info cache [ 1048.385745] env[61978]: DEBUG nova.network.neutron [req-99f119a6-10a3-41ab-865c-c41f4cbe2bda req-023d1b14-fcbe-4413-9fad-95c6b38a573a service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Updating instance_info_cache with network_info: [{"id": "8664e01e-1422-4709-85a0-c3684ca5733c", "address": "fa:16:3e:c5:e9:39", "network": {"id": "228d9811-3a39-4966-b633-05f07ba74cec", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1088133781", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "26cb7552530047c5867347d62195121e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8664e01e-14", "ovs_interfaceid": "8664e01e-1422-4709-85a0-c3684ca5733c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "30edec9f-85c0-41f9-ab16-aea72cc18c06", "address": "fa:16:3e:f8:50:b7", "network": {"id": "7eec4577-9cb8-47c3-89d5-035fe7edc036", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-206202374", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26cb7552530047c5867347d62195121e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30edec9f-85", "ovs_interfaceid": "30edec9f-85c0-41f9-ab16-aea72cc18c06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.434774] env[61978]: DEBUG oslo_concurrency.lockutils [None 
req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "7e71c8de-1f94-4161-8ad8-a67792c5ce24" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.068s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.552230] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ef7cf55e-a530-420e-a819-dee9f3fb0b13 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "9ee04ee8-98ec-4be9-935d-cad7cd176466" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.702s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.580880] env[61978]: DEBUG nova.network.neutron [-] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.619943] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2591de29-1550-4e2d-b01a-c9aa9e9e03d1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.629722] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4915cd9c-1841-4a8f-b030-6545be700fa5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.666443] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be6518ce-5ffd-4061-a50b-c8563efaadc5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.674931] env[61978]: DEBUG oslo_vmware.api [None req-5f3ac966-b749-4c0c-9ee7-b042e4005453 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395217, 'name': PowerOffVM_Task, 'duration_secs': 0.401495} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.675777] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-539795c4-92d6-43d7-aba2-c24bb289e4c4 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1048.677554] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Releasing lock "refresh_cache-0cdff646-34ad-49d5-b775-28e8e7ce778e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.677554] env[61978]: DEBUG nova.compute.manager [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Instance network_info: |[{"id": "3b5e4ed3-f9fb-4eed-b851-213b746751b7", "address": "fa:16:3e:11:2f:af", "network": {"id": "a4bfbe6c-0ede-463d-935f-52cc2f1e3692", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1615190107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3c3fe7c7f560427db0f814a2c67bb527", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5215e5b-294b-4e8c-bd06-355e9955ab1d", "external-id": "nsx-vlan-transportzone-529", "segmentation_id": 529, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b5e4ed3-f9", "ovs_interfaceid": "3b5e4ed3-f9fb-4eed-b851-213b746751b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1048.677554] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f3ac966-b749-4c0c-9ee7-b042e4005453 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1048.677733] env[61978]: DEBUG nova.compute.manager [None req-5f3ac966-b749-4c0c-9ee7-b042e4005453 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1048.678086] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c310f2ef-04a2-4a35-bdb9-8ce87ff7d328 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.684174] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 
0cdff646-34ad-49d5-b775-28e8e7ce778e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:2f:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b5215e5b-294b-4e8c-bd06-355e9955ab1d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3b5e4ed3-f9fb-4eed-b851-213b746751b7', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1048.690632] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Creating folder: Project (3c3fe7c7f560427db0f814a2c67bb527). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1048.691469] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28444b17-e8a7-4136-ba90-faf31bc4da9d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.694876] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1361cb37-0a78-4b22-81e7-52290f7e2fe5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.700852] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-affc53f4-1c01-4a6b-be2a-f62be2ea033c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.709198] env[61978]: DEBUG oslo_vmware.api [None req-539795c4-92d6-43d7-aba2-c24bb289e4c4 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1048.709198] env[61978]: value = "task-1395218" [ 1048.709198] env[61978]: _type = "Task" [ 1048.709198] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.728227] env[61978]: DEBUG nova.compute.provider_tree [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1048.730193] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Created folder: Project (3c3fe7c7f560427db0f814a2c67bb527) in parent group-v295764. [ 1048.730957] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Creating folder: Instances. Parent ref: group-v295931. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1048.732138] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.734148] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c462abfe-1114-4ef3-a9cc-0c508b87b3e0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.738990] env[61978]: DEBUG oslo_vmware.api [None req-539795c4-92d6-43d7-aba2-c24bb289e4c4 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395218, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.750522] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Created folder: Instances in parent group-v295931. [ 1048.750851] env[61978]: DEBUG oslo.service.loopingcall [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1048.751142] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1048.751457] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-748eb5d8-59d3-43bb-82bb-d4af27cd6846 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.774094] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1048.774094] env[61978]: value = "task-1395221" [ 1048.774094] env[61978]: _type = "Task" [ 1048.774094] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.783263] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395221, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.849252] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "0d48ae5d-7cc8-42b3-a993-44636e9cb171" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.849252] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "0d48ae5d-7cc8-42b3-a993-44636e9cb171" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.849252] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "0d48ae5d-7cc8-42b3-a993-44636e9cb171-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.849252] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "0d48ae5d-7cc8-42b3-a993-44636e9cb171-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.849252] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "0d48ae5d-7cc8-42b3-a993-44636e9cb171-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.851150] env[61978]: INFO nova.compute.manager [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Terminating instance [ 1048.853729] env[61978]: DEBUG nova.compute.manager [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1048.854007] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1048.855195] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b688d9-717c-41aa-aa16-c978f52860f4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.863862] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1048.864243] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-070e6f8c-6d88-43a4-9009-d1d7e7556329 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.888282] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-75d22792-9d8a-424e-a575-893fd7a0b55c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.900267] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75bd02e1-b4ad-4e4a-b69b-d760b0a1e0c0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.950780] env[61978]: DEBUG nova.compute.manager [req-99f119a6-10a3-41ab-865c-c41f4cbe2bda req-023d1b14-fcbe-4413-9fad-95c6b38a573a service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Detach interface failed, port_id=0ac08d0f-ea95-4b4e-9fad-6e6e819ec94d, reason: Instance 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5 could not be found. 
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1048.954595] env[61978]: DEBUG nova.compute.manager [req-99f119a6-10a3-41ab-865c-c41f4cbe2bda req-023d1b14-fcbe-4413-9fad-95c6b38a573a service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Received event network-vif-deleted-30edec9f-85c0-41f9-ab16-aea72cc18c06 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1048.958103] env[61978]: INFO nova.compute.manager [req-99f119a6-10a3-41ab-865c-c41f4cbe2bda req-023d1b14-fcbe-4413-9fad-95c6b38a573a service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Neutron deleted interface 30edec9f-85c0-41f9-ab16-aea72cc18c06; detaching it from the instance and deleting it from the info cache [ 1048.958103] env[61978]: DEBUG nova.network.neutron [req-99f119a6-10a3-41ab-865c-c41f4cbe2bda req-023d1b14-fcbe-4413-9fad-95c6b38a573a service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Updating instance_info_cache with network_info: [{"id": "8664e01e-1422-4709-85a0-c3684ca5733c", "address": "fa:16:3e:c5:e9:39", "network": {"id": "228d9811-3a39-4966-b633-05f07ba74cec", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1088133781", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "26cb7552530047c5867347d62195121e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8664e01e-14", "ovs_interfaceid": "8664e01e-1422-4709-85a0-c3684ca5733c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.960175] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1048.960593] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1048.961149] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Deleting the datastore file [datastore1] 0d48ae5d-7cc8-42b3-a993-44636e9cb171 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1048.961615] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 
tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.962275] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bb2c19da-999b-4e1a-95fa-379909649042 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.971023] env[61978]: DEBUG oslo_vmware.api [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1048.971023] env[61978]: value = "task-1395223" [ 1048.971023] env[61978]: _type = "Task" [ 1048.971023] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.979160] env[61978]: DEBUG oslo_vmware.api [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395223, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.026948] env[61978]: DEBUG oslo_concurrency.lockutils [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquiring lock "3ee1023c-7837-4db0-88d4-f88c9a43fba3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.027932] env[61978]: DEBUG oslo_concurrency.lockutils [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "3ee1023c-7837-4db0-88d4-f88c9a43fba3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.028331] env[61978]: DEBUG oslo_concurrency.lockutils [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquiring lock "3ee1023c-7837-4db0-88d4-f88c9a43fba3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.028633] env[61978]: DEBUG oslo_concurrency.lockutils [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "3ee1023c-7837-4db0-88d4-f88c9a43fba3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.028932] env[61978]: DEBUG oslo_concurrency.lockutils [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "3ee1023c-7837-4db0-88d4-f88c9a43fba3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
:: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.033756] env[61978]: INFO nova.compute.manager [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Swapping old allocation on dict_keys(['44209228-3464-48ae-bc40-83eccd44b0cf']) held by migration 3552715b-f1cf-4686-a31b-df98ffe8a8b8 for instance [ 1049.034706] env[61978]: INFO nova.compute.manager [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Terminating instance [ 1049.043656] env[61978]: DEBUG nova.compute.manager [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1049.044017] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1049.045160] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a204503a-d1a9-4c8e-9f58-8625d9957a20 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.057647] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1049.057647] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-daa5e372-7e01-4bc1-8f9b-f0e746ad9b26 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.065031] env[61978]: DEBUG oslo_vmware.api [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 1049.065031] env[61978]: value = "task-1395224" [ 1049.065031] env[61978]: _type = "Task" [ 1049.065031] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.069672] env[61978]: DEBUG nova.scheduler.client.report [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Overwriting current allocation {'allocations': {'44209228-3464-48ae-bc40-83eccd44b0cf': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 77}}, 'project_id': '252acdf1eb624fbf91eb9e90c011c038', 'user_id': 'a4782614e183484d800b1a9fbc19e51b', 'consumer_generation': 1} on consumer f930ab49-c215-4b2e-92b1-21c0d52a70eb {{(pid=61978) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1049.077656] env[61978]: DEBUG oslo_vmware.api [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395224, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.087531] env[61978]: INFO nova.compute.manager [-] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Took 2.81 seconds to deallocate network for instance. [ 1049.190017] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "refresh_cache-f930ab49-c215-4b2e-92b1-21c0d52a70eb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.190256] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquired lock "refresh_cache-f930ab49-c215-4b2e-92b1-21c0d52a70eb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.190412] env[61978]: DEBUG nova.network.neutron [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1049.225290] env[61978]: DEBUG oslo_vmware.api [None req-539795c4-92d6-43d7-aba2-c24bb289e4c4 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395218, 'name': PowerOffVM_Task, 'duration_secs': 0.218449} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.225896] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-539795c4-92d6-43d7-aba2-c24bb289e4c4 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1049.225896] env[61978]: DEBUG nova.compute.manager [None req-539795c4-92d6-43d7-aba2-c24bb289e4c4 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1049.226857] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bccb7b3b-18da-42e8-9dab-df66ed1ae290 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.232438] env[61978]: DEBUG nova.scheduler.client.report [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1049.242423] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5f3ac966-b749-4c0c-9ee7-b042e4005453 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "c17c986e-c008-4414-8dd1-4ea836458048" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.639s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.285234] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395221, 'name': CreateVM_Task, 'duration_secs': 0.382335} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.285419] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1049.286186] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.286354] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.286727] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1049.286983] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93f23bb3-21b4-479b-ad9f-903d545ff23c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.296901] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1049.296901] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d1448d-f7a6-012b-15a9-889b71020e98" [ 1049.296901] env[61978]: _type = "Task" [ 1049.296901] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.305578] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d1448d-f7a6-012b-15a9-889b71020e98, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.463483] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-13d6879c-9c20-44ce-ada8-c5d2d0200c6b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.480236] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f17aab-d41a-49e0-be2e-3c6da70994cd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.500530] env[61978]: DEBUG oslo_vmware.api [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395223, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.224933} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.502107] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1049.502107] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1049.502107] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1049.502107] env[61978]: INFO nova.compute.manager [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Took 0.65 seconds to destroy the instance on the hypervisor. [ 1049.502107] env[61978]: DEBUG oslo.service.loopingcall [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1049.502565] env[61978]: DEBUG nova.compute.manager [-] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1049.502565] env[61978]: DEBUG nova.network.neutron [-] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1049.524409] env[61978]: DEBUG nova.compute.manager [req-99f119a6-10a3-41ab-865c-c41f4cbe2bda req-023d1b14-fcbe-4413-9fad-95c6b38a573a service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Detach interface failed, port_id=30edec9f-85c0-41f9-ab16-aea72cc18c06, reason: Instance 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5 could not be found. 
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1049.526877] env[61978]: DEBUG nova.compute.manager [req-ee1dd15b-9b07-4d52-9ee2-647a0125bba0 req-d0b1b65d-b8ee-42b9-8389-2150789bc205 service nova] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Received event network-changed-3b5e4ed3-f9fb-4eed-b851-213b746751b7 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1049.527088] env[61978]: DEBUG nova.compute.manager [req-ee1dd15b-9b07-4d52-9ee2-647a0125bba0 req-d0b1b65d-b8ee-42b9-8389-2150789bc205 service nova] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Refreshing instance network info cache due to event network-changed-3b5e4ed3-f9fb-4eed-b851-213b746751b7. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1049.527314] env[61978]: DEBUG oslo_concurrency.lockutils [req-ee1dd15b-9b07-4d52-9ee2-647a0125bba0 req-d0b1b65d-b8ee-42b9-8389-2150789bc205 service nova] Acquiring lock "refresh_cache-0cdff646-34ad-49d5-b775-28e8e7ce778e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.527668] env[61978]: DEBUG oslo_concurrency.lockutils [req-ee1dd15b-9b07-4d52-9ee2-647a0125bba0 req-d0b1b65d-b8ee-42b9-8389-2150789bc205 service nova] Acquired lock "refresh_cache-0cdff646-34ad-49d5-b775-28e8e7ce778e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.527668] env[61978]: DEBUG nova.network.neutron [req-ee1dd15b-9b07-4d52-9ee2-647a0125bba0 req-d0b1b65d-b8ee-42b9-8389-2150789bc205 service nova] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Refreshing network info cache for port 3b5e4ed3-f9fb-4eed-b851-213b746751b7 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1049.575807] env[61978]: DEBUG oslo_vmware.api [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395224, 'name': PowerOffVM_Task, 'duration_secs': 0.234878} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.575807] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1049.575931] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1049.576217] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b518ac6c-863b-49da-840f-cb4caea5d210 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.596244] env[61978]: DEBUG oslo_concurrency.lockutils [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.641275] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1049.641915] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1049.641915] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Deleting the datastore file [datastore2] 3ee1023c-7837-4db0-88d4-f88c9a43fba3 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1049.641996] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57954e00-6ffc-4d53-91c0-146393835cda {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.649324] env[61978]: DEBUG oslo_vmware.api [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 1049.649324] env[61978]: value = "task-1395226" [ 1049.649324] env[61978]: _type = "Task" [ 1049.649324] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.660841] env[61978]: DEBUG oslo_vmware.api [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395226, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.739867] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.264s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.747022] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.151s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.747022] env[61978]: INFO nova.compute.claims [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1049.750589] env[61978]: DEBUG oslo_concurrency.lockutils [None req-539795c4-92d6-43d7-aba2-c24bb289e4c4 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.599s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.773672] env[61978]: INFO nova.scheduler.client.report [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Deleted allocations for instance 50788030-4dc2-4215-bf2c-acba5dd33ce4 [ 1049.817388] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d1448d-f7a6-012b-15a9-889b71020e98, 'name': SearchDatastore_Task, 'duration_secs': 0.011876} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.818634] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1049.818963] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1049.819268] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.819534] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.819897] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1049.820712] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70a556aa-ff8d-4416-bd99-cb79ef28133a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.830365] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1049.831327] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1049.831327] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe9ad45e-0950-49ca-881f-cfaefe4b0d98 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.837896] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1049.837896] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52eb8c47-d3a1-8578-a68d-c6ea3358e54c" [ 1049.837896] env[61978]: _type = "Task" [ 1049.837896] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.847284] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52eb8c47-d3a1-8578-a68d-c6ea3358e54c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.008641] env[61978]: DEBUG nova.network.neutron [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Updating instance_info_cache with network_info: [{"id": "fcd64700-31ef-4310-8986-b22e515b1c55", "address": "fa:16:3e:a7:22:5a", "network": {"id": "7cec99b7-ff56-469d-a81e-88c396bf34e4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "3da19a374c3148e4b31cec361b3dbeed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd64700-31", "ovs_interfaceid": "fcd64700-31ef-4310-8986-b22e515b1c55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.118262] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "a1087abd-28d1-40ac-96ab-dc38392d027c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.119489] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "a1087abd-28d1-40ac-96ab-dc38392d027c" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1050.164692] env[61978]: DEBUG oslo_vmware.api [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395226, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.266981} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.165265] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1050.166432] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1050.166432] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1050.166432] env[61978]: INFO nova.compute.manager [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1050.166432] env[61978]: DEBUG oslo.service.loopingcall [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1050.167150] env[61978]: DEBUG nova.compute.manager [-] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1050.167333] env[61978]: DEBUG nova.network.neutron [-] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1050.285209] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2670292b-fddf-4757-b45d-7aced33d2836 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "50788030-4dc2-4215-bf2c-acba5dd33ce4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.610s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.352925] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52eb8c47-d3a1-8578-a68d-c6ea3358e54c, 'name': SearchDatastore_Task, 'duration_secs': 0.017831} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.353970] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-610f58b9-02e5-4252-89ea-b0caec724c41 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.358760] env[61978]: DEBUG nova.network.neutron [-] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.366193] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1050.366193] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]528cdefa-6e6e-8bc8-c1dd-ab8209de59cc" [ 1050.366193] env[61978]: _type = "Task" [ 1050.366193] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.376537] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528cdefa-6e6e-8bc8-c1dd-ab8209de59cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.437332] env[61978]: DEBUG nova.network.neutron [req-ee1dd15b-9b07-4d52-9ee2-647a0125bba0 req-d0b1b65d-b8ee-42b9-8389-2150789bc205 service nova] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Updated VIF entry in instance network info cache for port 3b5e4ed3-f9fb-4eed-b851-213b746751b7. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1050.437332] env[61978]: DEBUG nova.network.neutron [req-ee1dd15b-9b07-4d52-9ee2-647a0125bba0 req-d0b1b65d-b8ee-42b9-8389-2150789bc205 service nova] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Updating instance_info_cache with network_info: [{"id": "3b5e4ed3-f9fb-4eed-b851-213b746751b7", "address": "fa:16:3e:11:2f:af", "network": {"id": "a4bfbe6c-0ede-463d-935f-52cc2f1e3692", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1615190107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3c3fe7c7f560427db0f814a2c67bb527", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5215e5b-294b-4e8c-bd06-355e9955ab1d", "external-id": "nsx-vlan-transportzone-529", "segmentation_id": 529, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b5e4ed3-f9", "ovs_interfaceid": "3b5e4ed3-f9fb-4eed-b851-213b746751b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.512265] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Releasing lock "refresh_cache-f930ab49-c215-4b2e-92b1-21c0d52a70eb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1050.512265] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1050.512980] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8a03fa8a-99a5-4c35-96b0-01ed6f3892ab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.523440] env[61978]: DEBUG oslo_vmware.api [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 1050.523440] env[61978]: value = "task-1395227" [ 1050.523440] env[61978]: _type = "Task" [ 1050.523440] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.533813] env[61978]: DEBUG oslo_vmware.api [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395227, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.630769] env[61978]: DEBUG nova.compute.manager [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1050.764649] env[61978]: DEBUG nova.compute.manager [req-177f10e9-9ec5-4a6e-8a73-2d7c47520c6d req-57a9439a-a489-4ddf-a92f-2fb8974a6810 service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Received event network-vif-deleted-8664e01e-1422-4709-85a0-c3684ca5733c {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1050.764804] env[61978]: INFO nova.compute.manager [req-177f10e9-9ec5-4a6e-8a73-2d7c47520c6d req-57a9439a-a489-4ddf-a92f-2fb8974a6810 service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Neutron deleted interface 8664e01e-1422-4709-85a0-c3684ca5733c; detaching it from the instance and deleting it from the info cache [ 1050.765102] env[61978]: DEBUG nova.network.neutron [req-177f10e9-9ec5-4a6e-8a73-2d7c47520c6d req-57a9439a-a489-4ddf-a92f-2fb8974a6810 service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.832145] env[61978]: DEBUG nova.objects.instance [None req-d471948d-f22f-43b0-97fc-e8d7bd459d5f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lazy-loading 'flavor' on Instance uuid f3c837fb-be7e-40a6-aae4-7f213c62ab2c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1050.865576] env[61978]: INFO nova.compute.manager [-] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Took 1.36 seconds to deallocate network for instance. [ 1050.884979] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528cdefa-6e6e-8bc8-c1dd-ab8209de59cc, 'name': SearchDatastore_Task, 'duration_secs': 0.025182} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.885585] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1050.886972] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 0cdff646-34ad-49d5-b775-28e8e7ce778e/0cdff646-34ad-49d5-b775-28e8e7ce778e.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1050.888150] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1568c5a1-4139-40cc-81d2-1f83c53ecf57 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.898596] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1050.898596] env[61978]: value = "task-1395228" [ 1050.898596] env[61978]: _type = "Task" [ 1050.898596] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.906115] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "85fc5af8-454d-4042-841a-945b7e84eb6c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.907060] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "85fc5af8-454d-4042-841a-945b7e84eb6c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1050.907060] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "85fc5af8-454d-4042-841a-945b7e84eb6c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.907060] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "85fc5af8-454d-4042-841a-945b7e84eb6c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1050.907060] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "85fc5af8-454d-4042-841a-945b7e84eb6c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.912442] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395228, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.913362] env[61978]: INFO nova.compute.manager [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Terminating instance [ 1050.915116] env[61978]: DEBUG nova.compute.manager [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1050.915816] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1050.916359] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d93ddf40-e9e4-4789-814d-18c96905d81c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.927933] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1050.928354] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bee9e480-c77c-4079-a591-3cfaca74aa8a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.936672] env[61978]: DEBUG oslo_vmware.api [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 1050.936672] env[61978]: value = "task-1395229" [ 1050.936672] env[61978]: _type = "Task" [ 1050.936672] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.944972] env[61978]: DEBUG oslo_concurrency.lockutils [req-ee1dd15b-9b07-4d52-9ee2-647a0125bba0 req-d0b1b65d-b8ee-42b9-8389-2150789bc205 service nova] Releasing lock "refresh_cache-0cdff646-34ad-49d5-b775-28e8e7ce778e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1050.951064] env[61978]: DEBUG oslo_vmware.api [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395229, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.990621] env[61978]: DEBUG nova.network.neutron [-] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.037392] env[61978]: DEBUG oslo_vmware.api [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395227, 'name': PowerOffVM_Task, 'duration_secs': 0.246292} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.037999] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1051.038542] env[61978]: DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T15:05:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='85790f6c-8872-4acd-90a5-40fd0cc369d4',id=39,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1158956347',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1051.039104] env[61978]: DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1051.039104] env[61978]: DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1051.039225] env[61978]: DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] 
Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1051.039411] env[61978]: DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1051.042850] env[61978]: DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1051.042850] env[61978]: DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1051.042850] env[61978]: DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1051.042850] env[61978]: DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1051.042850] env[61978]: DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1051.042850] env[61978]: DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1051.046865] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c112f46-f02e-439c-817a-dfeb230dcd29 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.072488] env[61978]: DEBUG oslo_vmware.api [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 1051.072488] env[61978]: value = "task-1395230" [ 1051.072488] env[61978]: _type = "Task" [ 1051.072488] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.083481] env[61978]: DEBUG oslo_vmware.api [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395230, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.156437] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.169790] env[61978]: DEBUG oslo_concurrency.lockutils [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Acquiring lock "fdd0c16d-b0f8-4f81-9069-34d11f273acb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.170077] env[61978]: DEBUG oslo_concurrency.lockutils [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Lock "fdd0c16d-b0f8-4f81-9069-34d11f273acb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.230173] env[61978]: DEBUG nova.compute.manager [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Stashing vm_state: stopped {{(pid=61978) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1051.272104] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-46f72af1-35de-4799-b465-81fea896d48e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.284933] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8b620e-f5e2-486d-84b2-ad290d2dfa6d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.326495] env[61978]: DEBUG nova.compute.manager [req-177f10e9-9ec5-4a6e-8a73-2d7c47520c6d req-57a9439a-a489-4ddf-a92f-2fb8974a6810 service nova] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Detach interface failed, port_id=8664e01e-1422-4709-85a0-c3684ca5733c, reason: Instance 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5 could not be found. 
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1051.336879] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d471948d-f22f-43b0-97fc-e8d7bd459d5f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "refresh_cache-f3c837fb-be7e-40a6-aae4-7f213c62ab2c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.337097] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d471948d-f22f-43b0-97fc-e8d7bd459d5f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquired lock "refresh_cache-f3c837fb-be7e-40a6-aae4-7f213c62ab2c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.337330] env[61978]: DEBUG nova.network.neutron [None req-d471948d-f22f-43b0-97fc-e8d7bd459d5f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1051.337565] env[61978]: DEBUG nova.objects.instance [None req-d471948d-f22f-43b0-97fc-e8d7bd459d5f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lazy-loading 'info_cache' on Instance uuid f3c837fb-be7e-40a6-aae4-7f213c62ab2c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.359897] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3c15a4-40eb-40f4-9192-6fd8b7b6f1d7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.368724] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc9a97d-c21d-43c8-ac8d-38610b9cb463 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.398306] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.399762] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005e4992-887e-428d-a5be-89f25d09d14e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.415401] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e11948a8-18a0-4424-ac6f-67fe1431fb64 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.419566] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395228, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.430160] env[61978]: DEBUG nova.compute.provider_tree [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1051.447403] env[61978]: DEBUG oslo_vmware.api [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395229, 'name': PowerOffVM_Task, 'duration_secs': 0.22658} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.447732] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1051.447909] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1051.448210] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b33cb2ed-3fe8-4988-a2fb-df0d4b2c1cec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.494030] env[61978]: INFO nova.compute.manager [-] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Took 1.33 seconds to deallocate network for instance. 
[ 1051.513723] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1051.513980] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1051.514264] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Deleting the datastore file [datastore2] 85fc5af8-454d-4042-841a-945b7e84eb6c {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1051.514579] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-143cf61f-4805-465b-b2b5-8e446f8312f8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.522831] env[61978]: DEBUG oslo_vmware.api [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 1051.522831] env[61978]: value = "task-1395232" [ 1051.522831] env[61978]: _type = "Task" [ 1051.522831] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.530950] env[61978]: DEBUG oslo_vmware.api [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395232, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.554284] env[61978]: DEBUG nova.compute.manager [req-b96cc8a4-9f83-4031-a54f-6ee5348f5cce req-c015071e-5d3f-4d06-807f-8deaa63e4344 service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Received event network-changed-fdf95a42-1379-4895-9a94-f8a8cf1d070d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1051.554540] env[61978]: DEBUG nova.compute.manager [req-b96cc8a4-9f83-4031-a54f-6ee5348f5cce req-c015071e-5d3f-4d06-807f-8deaa63e4344 service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Refreshing instance network info cache due to event network-changed-fdf95a42-1379-4895-9a94-f8a8cf1d070d. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1051.554824] env[61978]: DEBUG oslo_concurrency.lockutils [req-b96cc8a4-9f83-4031-a54f-6ee5348f5cce req-c015071e-5d3f-4d06-807f-8deaa63e4344 service nova] Acquiring lock "refresh_cache-9ee04ee8-98ec-4be9-935d-cad7cd176466" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.555017] env[61978]: DEBUG oslo_concurrency.lockutils [req-b96cc8a4-9f83-4031-a54f-6ee5348f5cce req-c015071e-5d3f-4d06-807f-8deaa63e4344 service nova] Acquired lock "refresh_cache-9ee04ee8-98ec-4be9-935d-cad7cd176466" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.555234] env[61978]: DEBUG nova.network.neutron [req-b96cc8a4-9f83-4031-a54f-6ee5348f5cce req-c015071e-5d3f-4d06-807f-8deaa63e4344 service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Refreshing network info cache for port fdf95a42-1379-4895-9a94-f8a8cf1d070d {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1051.557185] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1051.557343] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1051.568537] env[61978]: DEBUG oslo_concurrency.lockutils [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "7e71c8de-1f94-4161-8ad8-a67792c5ce24" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.568809] env[61978]: DEBUG oslo_concurrency.lockutils [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "7e71c8de-1f94-4161-8ad8-a67792c5ce24" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.569122] env[61978]: DEBUG oslo_concurrency.lockutils [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "7e71c8de-1f94-4161-8ad8-a67792c5ce24-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.570027] env[61978]: DEBUG oslo_concurrency.lockutils [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "7e71c8de-1f94-4161-8ad8-a67792c5ce24-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.570027] env[61978]: DEBUG oslo_concurrency.lockutils [None 
req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "7e71c8de-1f94-4161-8ad8-a67792c5ce24-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.573823] env[61978]: INFO nova.compute.manager [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Terminating instance [ 1051.579225] env[61978]: DEBUG nova.compute.manager [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1051.579449] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1051.580493] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-922feea1-8f83-4594-ad8e-c7dd6da0c659 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.588982] env[61978]: DEBUG oslo_vmware.api [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395230, 'name': ReconfigVM_Task, 'duration_secs': 0.196164} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.591383] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1051.592128] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32106445-ff28-4b92-96a6-fc0c406a0e57 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.594722] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9be9833-c9f3-438e-8e7b-db6e7fa688d5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.613196] env[61978]: DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T15:05:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='85790f6c-8872-4acd-90a5-40fd0cc369d4',id=39,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1158956347',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1051.613474] env[61978]: DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1051.613639] env[61978]: DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1051.613826] env[61978]: DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1051.613975] env[61978]: DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1051.614152] env[61978]: DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1051.614367] env[61978]: 
DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1051.614533] env[61978]: DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1051.614704] env[61978]: DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1051.614871] env[61978]: DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1051.615059] env[61978]: DEBUG nova.virt.hardware [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1051.615894] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddd986ea-36c8-4ad7-826a-e589792b628a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.621995] env[61978]: DEBUG oslo_vmware.api [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 1051.621995] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c99dc4-b151-d0ff-dbe8-c14a638941fe" [ 1051.621995] env[61978]: _type = "Task" [ 1051.621995] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.631868] env[61978]: DEBUG oslo_vmware.api [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c99dc4-b151-d0ff-dbe8-c14a638941fe, 'name': SearchDatastore_Task, 'duration_secs': 0.007774} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.636924] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Reconfiguring VM instance instance-0000002b to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1051.638009] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-110229ef-0cbc-433d-9f35-03c231f61f76 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.661886] env[61978]: DEBUG oslo_vmware.api [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 1051.661886] env[61978]: value = "task-1395234" [ 1051.661886] env[61978]: _type = "Task" [ 1051.661886] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.666418] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1051.666632] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1051.666819] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Deleting the datastore file [datastore2] 7e71c8de-1f94-4161-8ad8-a67792c5ce24 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1051.667459] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71015537-5ece-4b1f-bbc7-682005e6f576 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.674832] env[61978]: DEBUG nova.compute.manager [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1051.677332] env[61978]: DEBUG oslo_vmware.api [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395234, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.678678] env[61978]: DEBUG oslo_vmware.api [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1051.678678] env[61978]: value = "task-1395235" [ 1051.678678] env[61978]: _type = "Task" [ 1051.678678] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.686397] env[61978]: DEBUG oslo_vmware.api [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395235, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.753396] env[61978]: DEBUG oslo_concurrency.lockutils [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.841734] env[61978]: DEBUG nova.objects.base [None req-d471948d-f22f-43b0-97fc-e8d7bd459d5f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1051.909015] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395228, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.553578} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.909315] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 0cdff646-34ad-49d5-b775-28e8e7ce778e/0cdff646-34ad-49d5-b775-28e8e7ce778e.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1051.909542] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1051.909802] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e4f51c80-4f13-4a25-81b5-141317681463 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.916106] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1051.916106] env[61978]: value = "task-1395236" [ 1051.916106] env[61978]: _type = "Task" [ 1051.916106] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.925361] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395236, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.933547] env[61978]: DEBUG nova.scheduler.client.report [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1052.001786] env[61978]: DEBUG oslo_concurrency.lockutils [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1052.032039] env[61978]: DEBUG oslo_vmware.api [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395232, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.250154} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.032039] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1052.032039] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1052.032309] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1052.032309] env[61978]: INFO nova.compute.manager [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1052.032523] env[61978]: DEBUG oslo.service.loopingcall [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1052.032730] env[61978]: DEBUG nova.compute.manager [-] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1052.032928] env[61978]: DEBUG nova.network.neutron [-] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1052.099733] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "refresh_cache-92eb5edb-803b-48d4-8c4f-338d7c3b3d13" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1052.100015] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquired lock "refresh_cache-92eb5edb-803b-48d4-8c4f-338d7c3b3d13" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.100055] env[61978]: DEBUG nova.network.neutron [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Forcefully refreshing network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1052.176357] env[61978]: DEBUG oslo_vmware.api [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395234, 'name': ReconfigVM_Task, 'duration_secs': 0.247779} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.176656] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Reconfigured VM instance instance-0000002b to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1052.177565] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d818d19f-e960-425e-bbd7-4a76c33e97b1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.211698] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] f930ab49-c215-4b2e-92b1-21c0d52a70eb/f930ab49-c215-4b2e-92b1-21c0d52a70eb.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1052.211698] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-421b218f-cc4f-43a5-9ec0-f72feb67e3ff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.226581] env[61978]: DEBUG oslo_vmware.api [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395235, 'name': DeleteDatastoreFile_Task, 'duration_secs': 
0.225422} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.227279] env[61978]: DEBUG oslo_concurrency.lockutils [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1052.227579] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1052.227764] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1052.229010] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1052.229010] env[61978]: INFO nova.compute.manager [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Took 0.65 seconds to destroy the instance on the hypervisor. [ 1052.229010] env[61978]: DEBUG oslo.service.loopingcall [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1052.229010] env[61978]: DEBUG nova.compute.manager [-] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1052.229567] env[61978]: DEBUG nova.network.neutron [-] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1052.237279] env[61978]: DEBUG oslo_vmware.api [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 1052.237279] env[61978]: value = "task-1395237" [ 1052.237279] env[61978]: _type = "Task" [ 1052.237279] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.250368] env[61978]: DEBUG oslo_vmware.api [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395237, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.430206] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395236, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067513} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.430461] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1052.431464] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70441c3e-335c-4549-98c7-c0675b134919 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.450257] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.707s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.450952] env[61978]: DEBUG nova.compute.manager [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1052.463953] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 0cdff646-34ad-49d5-b775-28e8e7ce778e/0cdff646-34ad-49d5-b775-28e8e7ce778e.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1052.468021] env[61978]: DEBUG nova.network.neutron [req-b96cc8a4-9f83-4031-a54f-6ee5348f5cce req-c015071e-5d3f-4d06-807f-8deaa63e4344 service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Updated VIF entry in instance network info cache for port fdf95a42-1379-4895-9a94-f8a8cf1d070d. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1052.468021] env[61978]: DEBUG nova.network.neutron [req-b96cc8a4-9f83-4031-a54f-6ee5348f5cce req-c015071e-5d3f-4d06-807f-8deaa63e4344 service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Updating instance_info_cache with network_info: [{"id": "fdf95a42-1379-4895-9a94-f8a8cf1d070d", "address": "fa:16:3e:0f:c0:2a", "network": {"id": "3e60c5f0-d590-4a22-a8bf-c0356ac4deb4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1865077723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86f4ae0b29af4ee2b33e5a499cf1e899", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdf95a42-13", "ovs_interfaceid": "fdf95a42-1379-4895-9a94-f8a8cf1d070d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.468021] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.851s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1052.469223] env[61978]: INFO nova.compute.claims [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1052.473320] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f53f456f-a316-4baf-8872-91486bc78d8f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.500019] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1052.500019] env[61978]: value = "task-1395238" [ 1052.500019] env[61978]: _type = "Task" [ 1052.500019] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.508174] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395238, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.752883] env[61978]: DEBUG oslo_vmware.api [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395237, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.754660] env[61978]: DEBUG nova.network.neutron [None req-d471948d-f22f-43b0-97fc-e8d7bd459d5f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Updating instance_info_cache with network_info: [{"id": "4bd8d0bd-32e6-47a0-9308-f8aebe253aa4", "address": "fa:16:3e:88:b3:ad", "network": {"id": "4a12b89f-0910-460a-8694-c424a051b6c6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-917593776-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df79d3305e464a6b83f18497a2464140", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bd8d0bd-32", "ovs_interfaceid": "4bd8d0bd-32e6-47a0-9308-f8aebe253aa4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.902921] env[61978]: DEBUG nova.network.neutron [-] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.956388] env[61978]: DEBUG nova.compute.utils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1052.957824] env[61978]: DEBUG nova.compute.manager [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1052.958008] env[61978]: DEBUG nova.network.neutron [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1052.992183] env[61978]: DEBUG oslo_concurrency.lockutils [req-b96cc8a4-9f83-4031-a54f-6ee5348f5cce req-c015071e-5d3f-4d06-807f-8deaa63e4344 service nova] Releasing lock "refresh_cache-9ee04ee8-98ec-4be9-935d-cad7cd176466" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1052.992466] env[61978]: DEBUG nova.compute.manager [req-b96cc8a4-9f83-4031-a54f-6ee5348f5cce req-c015071e-5d3f-4d06-807f-8deaa63e4344 service nova] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Received event network-vif-deleted-46a7e991-f936-4f08-bc8b-1ea0bb74eeb9 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1052.992697] env[61978]: DEBUG nova.compute.manager [req-b96cc8a4-9f83-4031-a54f-6ee5348f5cce req-c015071e-5d3f-4d06-807f-8deaa63e4344 service nova] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Received event network-vif-deleted-c0a88faf-13e7-4c53-bd83-b5d1060f8d5b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1053.010182] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395238, 'name': ReconfigVM_Task, 'duration_secs': 0.47348} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.010910] env[61978]: DEBUG nova.policy [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '934ae9fb7c73480292add0c86672649e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b106bb3dbeb4bc9a4fc832c860a559d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1053.012409] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 0cdff646-34ad-49d5-b775-28e8e7ce778e/0cdff646-34ad-49d5-b775-28e8e7ce778e.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1053.013020] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-569183d3-c0c3-47f1-adf9-005f91a65d02 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.020773] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1053.020773] env[61978]: value = "task-1395239" [ 1053.020773] env[61978]: _type = "Task" [ 1053.020773] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.029900] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395239, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.197613] env[61978]: DEBUG nova.network.neutron [-] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.250047] env[61978]: DEBUG oslo_vmware.api [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395237, 'name': ReconfigVM_Task, 'duration_secs': 0.523638} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.251816] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Reconfigured VM instance instance-0000002b to attach disk [datastore2] f930ab49-c215-4b2e-92b1-21c0d52a70eb/f930ab49-c215-4b2e-92b1-21c0d52a70eb.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1053.252788] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f084fe61-875f-457a-a26a-8f5b846faefa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.257111] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d471948d-f22f-43b0-97fc-e8d7bd459d5f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Releasing lock "refresh_cache-f3c837fb-be7e-40a6-aae4-7f213c62ab2c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.278916] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef00c2f9-224a-4722-9bde-61fb00d1ac56 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.299330] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0144c992-1329-4b2f-89ab-992bf7a61ee1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.320472] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-763db4a8-4547-4bcf-be6a-d4944749846d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.331533] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1053.331972] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bbdf0d46-8f09-4738-b080-6f019a8cb81b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.338334] env[61978]: DEBUG oslo_vmware.api [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 1053.338334] env[61978]: value = "task-1395240" [ 1053.338334] env[61978]: _type = "Task" [ 1053.338334] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.349236] env[61978]: DEBUG oslo_vmware.api [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395240, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.394045] env[61978]: DEBUG nova.network.neutron [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Successfully created port: 3e20714f-6e99-445a-af4a-2d7a05b46f72 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1053.407128] env[61978]: INFO nova.compute.manager [-] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Took 1.37 seconds to deallocate network for instance. [ 1053.460632] env[61978]: DEBUG nova.network.neutron [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Updating instance_info_cache with network_info: [{"id": "15ee1476-11da-4794-a070-c4365a572948", "address": "fa:16:3e:02:56:f2", "network": {"id": "bc368623-cfb2-43fb-a7d6-8dd238bd961d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1360202280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d7394d965f94155a34dd0ecc0957649", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15ee1476-11", "ovs_interfaceid": "15ee1476-11da-4794-a070-c4365a572948", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.463040] env[61978]: DEBUG nova.compute.manager [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1053.534510] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395239, 'name': Rename_Task, 'duration_secs': 0.213154} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.537781] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1053.538490] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-002ef7d7-2218-4b93-80a3-b9b6513b7eeb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.547064] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1053.547064] env[61978]: value = "task-1395241" [ 1053.547064] env[61978]: _type = "Task" [ 1053.547064] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.559566] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395241, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.602873] env[61978]: DEBUG nova.compute.manager [req-71d717f0-c3ff-4288-8540-3dcdce0f45a9 req-d9c642a4-7f2d-4ecd-96b2-bb35e5fb820d service nova] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Received event network-vif-deleted-5cb15476-a062-4b08-8f77-6955b8086740 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1053.603066] env[61978]: DEBUG nova.compute.manager [req-71d717f0-c3ff-4288-8540-3dcdce0f45a9 req-d9c642a4-7f2d-4ecd-96b2-bb35e5fb820d service nova] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Received event network-vif-deleted-7f676fff-3b85-4a9d-b8f4-68c0ea6eda7f {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1053.702769] env[61978]: INFO nova.compute.manager [-] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Took 1.47 seconds to deallocate network for instance. [ 1053.779285] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d471948d-f22f-43b0-97fc-e8d7bd459d5f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1053.779845] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5eb7a009-a17d-4a26-9dbd-a84d1367e2db {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.788519] env[61978]: DEBUG oslo_vmware.api [None req-d471948d-f22f-43b0-97fc-e8d7bd459d5f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1053.788519] env[61978]: value = "task-1395242" [ 1053.788519] env[61978]: _type = "Task" [ 1053.788519] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.800088] env[61978]: DEBUG oslo_vmware.api [None req-d471948d-f22f-43b0-97fc-e8d7bd459d5f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395242, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.853035] env[61978]: DEBUG oslo_vmware.api [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395240, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.913932] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.972636] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Releasing lock "refresh_cache-92eb5edb-803b-48d4-8c4f-338d7c3b3d13" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.972973] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Updated the network info_cache for instance {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1053.973874] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.974108] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Cleaning up deleted instances with incomplete migration {{(pid=61978) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 1054.001216] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bacf1ff5-22b1-4d02-8990-27bba2bb7d76 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.009103] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd52296-ba7d-4d6b-a998-35eb3226948c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.040640] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac73d3f1-3599-4aaa-b835-78b2a310def4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.052785] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2bae4a-a6f3-48aa-ac82-2360123e5037 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.061730] env[61978]: DEBUG oslo_vmware.api [None 
req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395241, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.070321] env[61978]: DEBUG nova.compute.provider_tree [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1054.209863] env[61978]: DEBUG oslo_concurrency.lockutils [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.300013] env[61978]: DEBUG oslo_vmware.api [None req-d471948d-f22f-43b0-97fc-e8d7bd459d5f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395242, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.352228] env[61978]: DEBUG oslo_vmware.api [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395240, 'name': PowerOnVM_Task, 'duration_secs': 0.634614} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.352547] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1054.475590] env[61978]: DEBUG nova.compute.manager [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1054.507973] env[61978]: DEBUG nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1054.508363] env[61978]: DEBUG nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1054.508622] env[61978]: DEBUG nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1054.508879] env[61978]: DEBUG nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1054.509116] env[61978]: DEBUG nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1054.509322] env[61978]: DEBUG nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1054.509603] env[61978]: DEBUG nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1054.509820] env[61978]: DEBUG nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1054.510065] env[61978]: DEBUG 
nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1054.510351] env[61978]: DEBUG nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1054.510605] env[61978]: DEBUG nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1054.511875] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f4592f-7f74-47d1-a5dc-20cc413d4f4a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.522718] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc84bf5f-1a0e-4634-ad90-cf376ac78a6f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.556777] env[61978]: DEBUG oslo_vmware.api [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395241, 'name': PowerOnVM_Task, 'duration_secs': 0.582409} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.557088] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1054.557304] env[61978]: INFO nova.compute.manager [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Took 8.29 seconds to spawn the instance on the hypervisor. 
[ 1054.557501] env[61978]: DEBUG nova.compute.manager [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1054.558289] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239080eb-9256-42a1-969d-fe41564e14e7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.573942] env[61978]: DEBUG nova.scheduler.client.report [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1054.798880] env[61978]: DEBUG oslo_vmware.api [None req-d471948d-f22f-43b0-97fc-e8d7bd459d5f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395242, 'name': PowerOnVM_Task, 'duration_secs': 0.553221} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.799943] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d471948d-f22f-43b0-97fc-e8d7bd459d5f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1054.799943] env[61978]: DEBUG nova.compute.manager [None req-d471948d-f22f-43b0-97fc-e8d7bd459d5f tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1054.800110] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-064e2327-731e-4c5b-aed9-060c8247a1fa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.964093] env[61978]: DEBUG nova.network.neutron [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Successfully updated port: 3e20714f-6e99-445a-af4a-2d7a05b46f72 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1055.075911] env[61978]: INFO nova.compute.manager [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Took 13.36 seconds to build instance. 
[ 1055.079070] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.612s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.079070] env[61978]: DEBUG nova.compute.manager [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1055.081325] env[61978]: DEBUG oslo_concurrency.lockutils [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.276s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.081497] env[61978]: DEBUG nova.objects.instance [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lazy-loading 'resources' on Instance uuid b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.363527] env[61978]: INFO nova.compute.manager [None req-f221a826-f5e5-4c26-a1f1-a1d8b8445490 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Updating instance to original state: 'active' [ 1055.468699] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "refresh_cache-f3a9f204-e4ed-49f1-85ef-8cea7377cf89" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1055.468863] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquired lock "refresh_cache-f3a9f204-e4ed-49f1-85ef-8cea7377cf89" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.469123] env[61978]: DEBUG nova.network.neutron [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1055.505656] env[61978]: DEBUG oslo_vmware.rw_handles [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5285565d-efa8-ae52-2c7b-63dc87a6c8a0/disk-0.vmdk. 
{{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1055.506720] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a343f61a-03b0-43c6-979c-b7fc26c80ad9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.513613] env[61978]: DEBUG oslo_vmware.rw_handles [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5285565d-efa8-ae52-2c7b-63dc87a6c8a0/disk-0.vmdk is in state: ready. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1055.513796] env[61978]: ERROR oslo_vmware.rw_handles [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5285565d-efa8-ae52-2c7b-63dc87a6c8a0/disk-0.vmdk due to incomplete transfer. [ 1055.514051] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-071192d3-83d5-4417-85e9-65e94099013d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.520891] env[61978]: DEBUG oslo_vmware.rw_handles [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5285565d-efa8-ae52-2c7b-63dc87a6c8a0/disk-0.vmdk. {{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1055.521116] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Uploaded image 6d954726-4613-4115-bc63-9b69f59b17f3 to the Glance image server {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1055.523548] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Destroying the VM {{(pid=61978) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1055.523796] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6c9b82d1-56fc-4f56-aee0-757337423ff4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.539193] env[61978]: DEBUG oslo_vmware.api [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 1055.539193] env[61978]: value = "task-1395243" [ 1055.539193] env[61978]: _type = "Task" [ 1055.539193] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.549082] env[61978]: DEBUG oslo_vmware.api [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395243, 'name': Destroy_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.577815] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fdefbe37-0c2a-48bf-9850-3df303ddaba1 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lock "0cdff646-34ad-49d5-b775-28e8e7ce778e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.880s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.584419] env[61978]: DEBUG nova.compute.utils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1055.589376] env[61978]: DEBUG nova.compute.manager [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1055.589438] env[61978]: DEBUG nova.network.neutron [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1055.633283] env[61978]: DEBUG nova.compute.manager [req-04e8239a-d9ee-4d8f-aff5-176215c6c6a1 req-0f313cf3-1bce-47dd-bfb8-dd9d504214ff service nova] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Received event network-vif-plugged-3e20714f-6e99-445a-af4a-2d7a05b46f72 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1055.633549] env[61978]: DEBUG oslo_concurrency.lockutils [req-04e8239a-d9ee-4d8f-aff5-176215c6c6a1 req-0f313cf3-1bce-47dd-bfb8-dd9d504214ff service nova] Acquiring lock "f3a9f204-e4ed-49f1-85ef-8cea7377cf89-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.633788] env[61978]: DEBUG oslo_concurrency.lockutils [req-04e8239a-d9ee-4d8f-aff5-176215c6c6a1 req-0f313cf3-1bce-47dd-bfb8-dd9d504214ff service nova] Lock "f3a9f204-e4ed-49f1-85ef-8cea7377cf89-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.633968] env[61978]: DEBUG oslo_concurrency.lockutils [req-04e8239a-d9ee-4d8f-aff5-176215c6c6a1 req-0f313cf3-1bce-47dd-bfb8-dd9d504214ff service nova] Lock "f3a9f204-e4ed-49f1-85ef-8cea7377cf89-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.634239] env[61978]: DEBUG nova.compute.manager [req-04e8239a-d9ee-4d8f-aff5-176215c6c6a1 req-0f313cf3-1bce-47dd-bfb8-dd9d504214ff service nova] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] No waiting events found dispatching network-vif-plugged-3e20714f-6e99-445a-af4a-2d7a05b46f72 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1055.634353] env[61978]: WARNING nova.compute.manager [req-04e8239a-d9ee-4d8f-aff5-176215c6c6a1 req-0f313cf3-1bce-47dd-bfb8-dd9d504214ff service nova] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Received unexpected event network-vif-plugged-3e20714f-6e99-445a-af4a-2d7a05b46f72 for instance with vm_state building and task_state spawning. [ 1055.634561] env[61978]: DEBUG nova.compute.manager [req-04e8239a-d9ee-4d8f-aff5-176215c6c6a1 req-0f313cf3-1bce-47dd-bfb8-dd9d504214ff service nova] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Received event network-changed-3e20714f-6e99-445a-af4a-2d7a05b46f72 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1055.634683] env[61978]: DEBUG nova.compute.manager [req-04e8239a-d9ee-4d8f-aff5-176215c6c6a1 req-0f313cf3-1bce-47dd-bfb8-dd9d504214ff service nova] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Refreshing instance network info cache due to event network-changed-3e20714f-6e99-445a-af4a-2d7a05b46f72. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1055.634896] env[61978]: DEBUG oslo_concurrency.lockutils [req-04e8239a-d9ee-4d8f-aff5-176215c6c6a1 req-0f313cf3-1bce-47dd-bfb8-dd9d504214ff service nova] Acquiring lock "refresh_cache-f3a9f204-e4ed-49f1-85ef-8cea7377cf89" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1055.645349] env[61978]: DEBUG nova.policy [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '934ae9fb7c73480292add0c86672649e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b106bb3dbeb4bc9a4fc832c860a559d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1055.913786] env[61978]: DEBUG nova.network.neutron [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Successfully created port: 141945f2-7f26-43d2-8d56-5880f14da310 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1056.014438] env[61978]: DEBUG nova.network.neutron [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1056.044328] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-955d53ca-4b28-4561-8e70-b59b7078f92f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.052251] env[61978]: DEBUG oslo_vmware.api [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395243, 'name': Destroy_Task} progress is 33%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.055339] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a127ce-e8dc-4fdb-b30a-83de41216e07 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.088981] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36857356-51ae-4d1f-a52e-b34c795ea9a8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.091907] env[61978]: DEBUG nova.compute.manager [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1056.100097] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73ea132-dd99-4bdc-bd90-e601b7b5294f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.117065] env[61978]: DEBUG nova.compute.provider_tree [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1056.279287] env[61978]: DEBUG nova.network.neutron [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Updating instance_info_cache with network_info: [{"id": "3e20714f-6e99-445a-af4a-2d7a05b46f72", "address": "fa:16:3e:2e:ae:1e", "network": {"id": "f5fa8c48-5b4f-4e9a-a2c2-4bbe15fe743e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1900032982-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b106bb3dbeb4bc9a4fc832c860a559d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap3e20714f-6e", "ovs_interfaceid": "3e20714f-6e99-445a-af4a-2d7a05b46f72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.423530] env[61978]: INFO nova.compute.manager [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Rescuing [ 1056.423936] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquiring lock "refresh_cache-0cdff646-34ad-49d5-b775-28e8e7ce778e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1056.424012] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquired lock "refresh_cache-0cdff646-34ad-49d5-b775-28e8e7ce778e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.424199] env[61978]: DEBUG nova.network.neutron [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1056.550966] env[61978]: DEBUG oslo_vmware.api [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395243, 'name': Destroy_Task, 'duration_secs': 0.731422} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.551271] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Destroyed the VM [ 1056.551515] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Deleting Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1056.551779] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-38507046-977f-4b8b-82d9-680e44f8b8b5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.558808] env[61978]: DEBUG oslo_vmware.api [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 1056.558808] env[61978]: value = "task-1395244" [ 1056.558808] env[61978]: _type = "Task" [ 1056.558808] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.567192] env[61978]: DEBUG oslo_vmware.api [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395244, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.620281] env[61978]: DEBUG nova.scheduler.client.report [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1056.782352] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Releasing lock "refresh_cache-f3a9f204-e4ed-49f1-85ef-8cea7377cf89" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1056.782748] env[61978]: DEBUG nova.compute.manager [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Instance network_info: |[{"id": "3e20714f-6e99-445a-af4a-2d7a05b46f72", "address": "fa:16:3e:2e:ae:1e", "network": {"id": "f5fa8c48-5b4f-4e9a-a2c2-4bbe15fe743e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1900032982-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b106bb3dbeb4bc9a4fc832c860a559d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e20714f-6e", "ovs_interfaceid": "3e20714f-6e99-445a-af4a-2d7a05b46f72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1056.783139] env[61978]: DEBUG oslo_concurrency.lockutils [req-04e8239a-d9ee-4d8f-aff5-176215c6c6a1 req-0f313cf3-1bce-47dd-bfb8-dd9d504214ff service nova] Acquired lock "refresh_cache-f3a9f204-e4ed-49f1-85ef-8cea7377cf89" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.783351] env[61978]: DEBUG nova.network.neutron 
[req-04e8239a-d9ee-4d8f-aff5-176215c6c6a1 req-0f313cf3-1bce-47dd-bfb8-dd9d504214ff service nova] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Refreshing network info cache for port 3e20714f-6e99-445a-af4a-2d7a05b46f72 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1056.784656] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:ae:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31e77685-b4dd-4810-80ef-24115ea9ea62', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e20714f-6e99-445a-af4a-2d7a05b46f72', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1056.793154] env[61978]: DEBUG oslo.service.loopingcall [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1056.794473] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1056.794741] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c569d0a-f97c-41f1-b0e6-b7a4a33b3d7f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.820052] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1056.820052] env[61978]: value = "task-1395245" [ 1056.820052] env[61978]: _type = "Task" [ 1056.820052] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.830979] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395245, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.028753] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "f930ab49-c215-4b2e-92b1-21c0d52a70eb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.029112] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "f930ab49-c215-4b2e-92b1-21c0d52a70eb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.029377] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "f930ab49-c215-4b2e-92b1-21c0d52a70eb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.029543] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "f930ab49-c215-4b2e-92b1-21c0d52a70eb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.029713] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "f930ab49-c215-4b2e-92b1-21c0d52a70eb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.032513] env[61978]: INFO nova.compute.manager [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Terminating instance [ 1057.036430] env[61978]: DEBUG nova.compute.manager [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1057.036655] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1057.037775] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-616c3793-8605-4260-860b-982f8bd23434 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.045693] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1057.045949] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b7629ac-ab3b-4746-b082-596efdc32050 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.053863] env[61978]: DEBUG oslo_vmware.api [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 1057.053863] env[61978]: value = "task-1395246" [ 1057.053863] env[61978]: _type = "Task" [ 1057.053863] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.068254] env[61978]: DEBUG oslo_vmware.api [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395246, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.073971] env[61978]: DEBUG oslo_vmware.api [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395244, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.101352] env[61978]: DEBUG nova.compute.manager [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1057.126545] env[61978]: DEBUG oslo_concurrency.lockutils [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.045s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.128811] env[61978]: DEBUG oslo_concurrency.lockutils [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.102s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.130499] env[61978]: INFO nova.compute.claims [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1057.136727] env[61978]: DEBUG nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1057.137033] env[61978]: DEBUG nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1057.137228] env[61978]: DEBUG nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1057.137416] env[61978]: DEBUG nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1057.137546] env[61978]: DEBUG nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1057.138047] env[61978]: DEBUG nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1057.138047] env[61978]: DEBUG nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1057.138166] env[61978]: DEBUG nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1057.138270] env[61978]: DEBUG nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1057.138443] env[61978]: DEBUG nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1057.138730] env[61978]: DEBUG nova.virt.hardware [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1057.140128] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50f075d-ef9d-46eb-8359-224f87435d59 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.151219] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee13382-6cda-4cee-b1fd-5cfd486b91be {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.156881] env[61978]: INFO nova.scheduler.client.report [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Deleted allocations for instance b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3 [ 1057.170656] env[61978]: DEBUG nova.network.neutron [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Updating instance_info_cache with network_info: [{"id": "3b5e4ed3-f9fb-4eed-b851-213b746751b7", "address": "fa:16:3e:11:2f:af", "network": {"id": "a4bfbe6c-0ede-463d-935f-52cc2f1e3692", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1615190107-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3c3fe7c7f560427db0f814a2c67bb527", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5215e5b-294b-4e8c-bd06-355e9955ab1d", "external-id": "nsx-vlan-transportzone-529", "segmentation_id": 529, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b5e4ed3-f9", "ovs_interfaceid": "3b5e4ed3-f9fb-4eed-b851-213b746751b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.330962] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395245, 'name': CreateVM_Task, 'duration_secs': 0.418921} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.331154] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1057.331833] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1057.332019] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.332349] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1057.332608] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-959360da-572c-40ad-ace2-f250a93985d9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.337217] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1057.337217] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cafaeb-1ceb-d7de-18eb-d198d25adb02" [ 1057.337217] env[61978]: _type = "Task" [ 1057.337217] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.345319] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cafaeb-1ceb-d7de-18eb-d198d25adb02, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.566816] env[61978]: DEBUG oslo_vmware.api [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395246, 'name': PowerOffVM_Task, 'duration_secs': 0.319615} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.567595] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1057.567765] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1057.568050] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5906d69d-3a88-4702-ab36-fad7b315746b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.572275] env[61978]: DEBUG oslo_vmware.api [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395244, 'name': RemoveSnapshot_Task, 'duration_secs': 0.692116} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.572704] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Deleted Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1057.572982] env[61978]: INFO nova.compute.manager [None req-30afd951-a154-4a1f-88a9-84231acecb52 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Took 14.75 seconds to snapshot the instance on the hypervisor. 
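Editor's note: the CreateVM_Task / PowerOffVM_Task / RemoveSnapshot_Task records above all follow the same pattern that repeats throughout this log: oslo.vmware submits a vCenter task, then polls it, emitting "progress is N%." lines until the task reports success together with a duration_secs. The sketch below is a minimal, illustrative poll loop that reproduces that behaviour; it is not the oslo.vmware implementation, and the get_task_info callable, poll_interval argument, and the TaskInfo-like fields (state, progress, error) are assumptions made only for the example.

import time


class TaskFailed(Exception):
    """Raised when the vCenter-style task ends in an error state."""


def wait_for_task(get_task_info, poll_interval=0.5, log=print):
    """Poll a vCenter-style task until it succeeds or fails.

    get_task_info is assumed to return an object with .state
    ('running', 'success' or 'error'), .progress and .error fields,
    mirroring the TaskInfo data reported in the log above.
    """
    start = time.monotonic()
    while True:
        info = get_task_info()
        if info.state == "success":
            # Corresponds to the "... completed successfully" + duration_secs records.
            log("Task completed successfully in %.3fs" % (time.monotonic() - start))
            return info
        if info.state == "error":
            raise TaskFailed(info.error)
        # Corresponds to the repeated "Task: {...} progress is N%." records.
        log("Task progress is %s%%." % (info.progress or 0))
        time.sleep(poll_interval)

In Nova the equivalent loop runs on a green thread behind an oslo.service retry/looping wrapper, which is why "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" lines appear right next to the task polling above.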
[ 1057.652520] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1057.652748] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1057.653475] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Deleting the datastore file [datastore2] f930ab49-c215-4b2e-92b1-21c0d52a70eb {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1057.653475] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17068ffe-a461-40ad-92da-52ed03ad01b3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.660760] env[61978]: DEBUG oslo_vmware.api [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 1057.660760] env[61978]: value = "task-1395248" [ 1057.660760] env[61978]: _type = "Task" [ 1057.660760] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.673020] env[61978]: DEBUG oslo_vmware.api [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395248, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.675429] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Releasing lock "refresh_cache-0cdff646-34ad-49d5-b775-28e8e7ce778e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1057.677437] env[61978]: DEBUG oslo_concurrency.lockutils [None req-61c749b1-1dc5-4b66-9371-8eb4b8f71c1c tempest-ListServerFiltersTestJSON-1256653220 tempest-ListServerFiltersTestJSON-1256653220-project-member] Lock "b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.410s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.797887] env[61978]: DEBUG nova.network.neutron [req-04e8239a-d9ee-4d8f-aff5-176215c6c6a1 req-0f313cf3-1bce-47dd-bfb8-dd9d504214ff service nova] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Updated VIF entry in instance network info cache for port 3e20714f-6e99-445a-af4a-2d7a05b46f72. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1057.798410] env[61978]: DEBUG nova.network.neutron [req-04e8239a-d9ee-4d8f-aff5-176215c6c6a1 req-0f313cf3-1bce-47dd-bfb8-dd9d504214ff service nova] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Updating instance_info_cache with network_info: [{"id": "3e20714f-6e99-445a-af4a-2d7a05b46f72", "address": "fa:16:3e:2e:ae:1e", "network": {"id": "f5fa8c48-5b4f-4e9a-a2c2-4bbe15fe743e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1900032982-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b106bb3dbeb4bc9a4fc832c860a559d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e20714f-6e", "ovs_interfaceid": "3e20714f-6e99-445a-af4a-2d7a05b46f72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.816105] env[61978]: DEBUG nova.compute.manager [req-5afb5bb2-17f1-47bb-91d6-b7ce5fa5c156 req-ce53a38c-e2ac-426d-8e52-c1103c7f2f53 service nova] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Received event network-vif-plugged-141945f2-7f26-43d2-8d56-5880f14da310 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1057.816527] env[61978]: DEBUG oslo_concurrency.lockutils [req-5afb5bb2-17f1-47bb-91d6-b7ce5fa5c156 req-ce53a38c-e2ac-426d-8e52-c1103c7f2f53 service nova] Acquiring lock "7e6178cf-b7be-46f8-8f8c-8605a09703c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.816897] env[61978]: DEBUG oslo_concurrency.lockutils [req-5afb5bb2-17f1-47bb-91d6-b7ce5fa5c156 req-ce53a38c-e2ac-426d-8e52-c1103c7f2f53 service nova] Lock "7e6178cf-b7be-46f8-8f8c-8605a09703c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.817168] env[61978]: DEBUG oslo_concurrency.lockutils [req-5afb5bb2-17f1-47bb-91d6-b7ce5fa5c156 req-ce53a38c-e2ac-426d-8e52-c1103c7f2f53 service nova] Lock "7e6178cf-b7be-46f8-8f8c-8605a09703c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.817414] env[61978]: DEBUG nova.compute.manager [req-5afb5bb2-17f1-47bb-91d6-b7ce5fa5c156 req-ce53a38c-e2ac-426d-8e52-c1103c7f2f53 service nova] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] No waiting events found dispatching network-vif-plugged-141945f2-7f26-43d2-8d56-5880f14da310 {{(pid=61978) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1057.817674] env[61978]: WARNING nova.compute.manager [req-5afb5bb2-17f1-47bb-91d6-b7ce5fa5c156 req-ce53a38c-e2ac-426d-8e52-c1103c7f2f53 service nova] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Received unexpected event network-vif-plugged-141945f2-7f26-43d2-8d56-5880f14da310 for instance with vm_state building and task_state spawning. [ 1057.849315] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cafaeb-1ceb-d7de-18eb-d198d25adb02, 'name': SearchDatastore_Task, 'duration_secs': 0.041831} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.849696] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1057.849990] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1057.850301] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1057.850514] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.850752] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1057.851306] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b9c12007-7369-4328-8940-0f0767fb2f6f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.861470] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1057.861728] env[61978]: DEBUG 
nova.virt.vmwareapi.vmops [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1057.862478] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea1cdebc-922b-48e7-8b52-bef5db13795c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.867591] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1057.867591] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52958e4e-dd97-d838-a901-1b41776f71f5" [ 1057.867591] env[61978]: _type = "Task" [ 1057.867591] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.875406] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52958e4e-dd97-d838-a901-1b41776f71f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.905084] env[61978]: DEBUG nova.network.neutron [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Successfully updated port: 141945f2-7f26-43d2-8d56-5880f14da310 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1058.061423] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1058.061680] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1058.171142] env[61978]: DEBUG oslo_vmware.api [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395248, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.419755} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.171419] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1058.171833] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1058.172025] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1058.172273] env[61978]: INFO nova.compute.manager [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1058.172525] env[61978]: DEBUG oslo.service.loopingcall [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1058.173078] env[61978]: DEBUG nova.compute.manager [-] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1058.173078] env[61978]: DEBUG nova.network.neutron [-] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1058.209981] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1058.210157] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-310c22c1-df5a-43e2-9e5c-bca338af52f2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.216911] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1058.216911] env[61978]: value = "task-1395249" [ 1058.216911] env[61978]: _type = "Task" [ 1058.216911] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.226009] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395249, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.304304] env[61978]: DEBUG oslo_concurrency.lockutils [req-04e8239a-d9ee-4d8f-aff5-176215c6c6a1 req-0f313cf3-1bce-47dd-bfb8-dd9d504214ff service nova] Releasing lock "refresh_cache-f3a9f204-e4ed-49f1-85ef-8cea7377cf89" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1058.382567] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52958e4e-dd97-d838-a901-1b41776f71f5, 'name': SearchDatastore_Task, 'duration_secs': 0.037485} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.385118] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94066ae2-ad28-44b6-a327-17db8177b012 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.396044] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1058.396044] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52557338-5e7a-2764-4be5-d7843257dee6" [ 1058.396044] env[61978]: _type = "Task" [ 1058.396044] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.410898] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "refresh_cache-7e6178cf-b7be-46f8-8f8c-8605a09703c7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1058.411204] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquired lock "refresh_cache-7e6178cf-b7be-46f8-8f8c-8605a09703c7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.411469] env[61978]: DEBUG nova.network.neutron [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1058.419920] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52557338-5e7a-2764-4be5-d7843257dee6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.566620] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.656039] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae17a98c-255d-46f9-ae9b-53488b530d2c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.664031] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9986da97-7a02-48aa-b0f0-f4ee308a1e6e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.694832] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73880303-4176-44d8-8d6f-d5c25ed54bed {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.702315] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba90246-bca6-4359-a91d-bee3d0c3d09d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.715384] env[61978]: DEBUG nova.compute.provider_tree [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1058.724702] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395249, 'name': PowerOffVM_Task, 'duration_secs': 0.213269} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.725532] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1058.726291] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b92844-e66e-4c4a-bc32-9fe0d0005f4a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.744083] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99310c9-6ddf-4308-9f35-756108f82abf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.774259] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1058.774862] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-427b60f6-beab-4295-82e8-aac1e27d8568 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.785028] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1058.785028] env[61978]: value = "task-1395250" [ 1058.785028] env[61978]: _type = "Task" [ 1058.785028] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.794889] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] VM already powered off {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1058.795075] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1058.795361] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1058.795565] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.795703] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1058.795945] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2503e82-17bc-48b6-815e-a3b534dc593b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.804941] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1058.805427] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1058.806243] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b26b9c53-8095-427d-b3f4-3f93256d8e45 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.811296] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1058.811296] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5265180a-3530-7fd6-a7ee-7c2890529893" [ 1058.811296] env[61978]: _type = "Task" [ 1058.811296] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.819044] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5265180a-3530-7fd6-a7ee-7c2890529893, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.908723] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52557338-5e7a-2764-4be5-d7843257dee6, 'name': SearchDatastore_Task, 'duration_secs': 0.015016} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.909276] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1058.909706] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] f3a9f204-e4ed-49f1-85ef-8cea7377cf89/f3a9f204-e4ed-49f1-85ef-8cea7377cf89.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1058.911427] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-84fa4bbf-2829-4e8e-b26e-4993cd3bc2ba {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.919101] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1058.919101] env[61978]: value = "task-1395251" [ 1058.919101] env[61978]: _type = "Task" [ 1058.919101] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.927365] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395251, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.953109] env[61978]: DEBUG nova.network.neutron [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1058.997433] env[61978]: DEBUG nova.network.neutron [-] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.134090] env[61978]: DEBUG nova.network.neutron [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Updating instance_info_cache with network_info: [{"id": "141945f2-7f26-43d2-8d56-5880f14da310", "address": "fa:16:3e:c2:b7:37", "network": {"id": "f5fa8c48-5b4f-4e9a-a2c2-4bbe15fe743e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1900032982-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b106bb3dbeb4bc9a4fc832c860a559d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap141945f2-7f", "ovs_interfaceid": "141945f2-7f26-43d2-8d56-5880f14da310", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.219571] env[61978]: DEBUG nova.scheduler.client.report [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1059.324614] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d 
tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5265180a-3530-7fd6-a7ee-7c2890529893, 'name': SearchDatastore_Task, 'duration_secs': 0.015634} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.325619] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de9d92a0-6b5d-4752-be77-c68e6c894eb7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.333840] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1059.333840] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f2ca95-2a75-6708-8ace-5f7a1d4c4f66" [ 1059.333840] env[61978]: _type = "Task" [ 1059.333840] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.343244] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f2ca95-2a75-6708-8ace-5f7a1d4c4f66, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.427156] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395251, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.501668] env[61978]: INFO nova.compute.manager [-] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Took 1.33 seconds to deallocate network for instance. 
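Editor's note: the "Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf ..." record a few lines up shows the resource-provider inventory that the resource-tracker claims in this log (e.g. "Claim successful on node domain-c8...") are made against. Placement derives the usable capacity of each resource class as (total - reserved) * allocation_ratio. The snippet below is an illustrative helper, not Nova or Placement code, applying that formula to the exact payload from the log.

def usable_capacity(inventory):
    """inventory: dict of resource class -> inventory record, as logged."""
    return {
        rc: (rec["total"] - rec["reserved"]) * rec["allocation_ratio"]
        for rc, rec in inventory.items()
    }


inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}
print(usable_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

With the logged values the host over-commits CPU 4x (192 schedulable VCPUs from 48 physical), while memory and disk use a 1.0 ratio and only subtract the reserved amounts.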
[ 1059.640375] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Releasing lock "refresh_cache-7e6178cf-b7be-46f8-8f8c-8605a09703c7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1059.640732] env[61978]: DEBUG nova.compute.manager [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Instance network_info: |[{"id": "141945f2-7f26-43d2-8d56-5880f14da310", "address": "fa:16:3e:c2:b7:37", "network": {"id": "f5fa8c48-5b4f-4e9a-a2c2-4bbe15fe743e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1900032982-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b106bb3dbeb4bc9a4fc832c860a559d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap141945f2-7f", "ovs_interfaceid": "141945f2-7f26-43d2-8d56-5880f14da310", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1059.641261] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:b7:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31e77685-b4dd-4810-80ef-24115ea9ea62', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '141945f2-7f26-43d2-8d56-5880f14da310', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1059.649235] env[61978]: DEBUG oslo.service.loopingcall [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1059.649490] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1059.649735] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e907828d-869a-4006-9699-38408caa502b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.673531] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1059.673531] env[61978]: value = "task-1395252" [ 1059.673531] env[61978]: _type = "Task" [ 1059.673531] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.681468] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395252, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.726706] env[61978]: DEBUG oslo_concurrency.lockutils [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.598s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.727240] env[61978]: DEBUG nova.compute.manager [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1059.729965] env[61978]: DEBUG oslo_concurrency.lockutils [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.646s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.731314] env[61978]: INFO nova.compute.claims [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1059.844766] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f2ca95-2a75-6708-8ace-5f7a1d4c4f66, 'name': SearchDatastore_Task, 'duration_secs': 0.016615} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.845086] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1059.845423] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 0cdff646-34ad-49d5-b775-28e8e7ce778e/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk. {{(pid=61978) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1059.845698] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1341c8a-a9de-43f1-b5a6-010e4d6af1e6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.852279] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1059.852279] env[61978]: value = "task-1395253" [ 1059.852279] env[61978]: _type = "Task" [ 1059.852279] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.860144] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395253, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.928092] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395251, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.773497} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.928390] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] f3a9f204-e4ed-49f1-85ef-8cea7377cf89/f3a9f204-e4ed-49f1-85ef-8cea7377cf89.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1059.928675] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1059.928938] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-78e2bda0-da45-4a8e-9863-44e15ed99052 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.940205] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1059.940205] env[61978]: value = "task-1395254" [ 1059.940205] env[61978]: _type = "Task" [ 1059.940205] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.948855] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395254, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.986175] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquiring lock "cb004a19-0048-4766-af7c-0fbde867f422" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.986621] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lock "cb004a19-0048-4766-af7c-0fbde867f422" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.986895] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquiring lock "cb004a19-0048-4766-af7c-0fbde867f422-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.987278] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lock "cb004a19-0048-4766-af7c-0fbde867f422-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.988169] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lock "cb004a19-0048-4766-af7c-0fbde867f422-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.993602] env[61978]: INFO nova.compute.manager [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Terminating instance [ 1059.999242] env[61978]: DEBUG nova.compute.manager [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1059.999518] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1060.000720] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-204d2cba-7923-4f98-a65a-2d47d50225fe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.012301] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.014310] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1060.014310] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-257ee0ee-a546-4eb6-8e16-76840f1f994f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.020407] env[61978]: DEBUG oslo_vmware.api [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 1060.020407] env[61978]: value = "task-1395255" [ 1060.020407] env[61978]: _type = "Task" [ 1060.020407] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.028223] env[61978]: DEBUG nova.compute.manager [req-4154096a-5fe0-4fe8-8450-ad05e25c9541 req-aa7e8558-ec67-4bbe-b090-4227c6372532 service nova] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Received event network-changed-141945f2-7f26-43d2-8d56-5880f14da310 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1060.028223] env[61978]: DEBUG nova.compute.manager [req-4154096a-5fe0-4fe8-8450-ad05e25c9541 req-aa7e8558-ec67-4bbe-b090-4227c6372532 service nova] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Refreshing instance network info cache due to event network-changed-141945f2-7f26-43d2-8d56-5880f14da310. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1060.028381] env[61978]: DEBUG oslo_concurrency.lockutils [req-4154096a-5fe0-4fe8-8450-ad05e25c9541 req-aa7e8558-ec67-4bbe-b090-4227c6372532 service nova] Acquiring lock "refresh_cache-7e6178cf-b7be-46f8-8f8c-8605a09703c7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1060.028645] env[61978]: DEBUG oslo_concurrency.lockutils [req-4154096a-5fe0-4fe8-8450-ad05e25c9541 req-aa7e8558-ec67-4bbe-b090-4227c6372532 service nova] Acquired lock "refresh_cache-7e6178cf-b7be-46f8-8f8c-8605a09703c7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.028942] env[61978]: DEBUG nova.network.neutron [req-4154096a-5fe0-4fe8-8450-ad05e25c9541 req-aa7e8558-ec67-4bbe-b090-4227c6372532 service nova] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Refreshing network info cache for port 141945f2-7f26-43d2-8d56-5880f14da310 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1060.036058] env[61978]: DEBUG oslo_vmware.api [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395255, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.184462] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395252, 'name': CreateVM_Task, 'duration_secs': 0.369107} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.184662] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1060.185439] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1060.185620] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.185955] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1060.186243] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd1e86e4-9b3e-4ae7-a1f2-b3384370f73d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.191341] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db 
tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1060.191341] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e1911f-4f3d-5caa-d395-c04c94805f23" [ 1060.191341] env[61978]: _type = "Task" [ 1060.191341] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.200096] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e1911f-4f3d-5caa-d395-c04c94805f23, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.236156] env[61978]: DEBUG nova.compute.utils [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1060.241544] env[61978]: DEBUG nova.compute.manager [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1060.242479] env[61978]: DEBUG nova.network.neutron [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1060.299379] env[61978]: DEBUG nova.policy [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8f50bac42274555ab08e047cdb028ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43ebac7c44604f55b94cbc06648f4908', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1060.362926] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395253, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.450158] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395254, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077497} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.450445] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1060.451434] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd238408-65b2-43c9-adfa-ce40708f08c5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.474561] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] f3a9f204-e4ed-49f1-85ef-8cea7377cf89/f3a9f204-e4ed-49f1-85ef-8cea7377cf89.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1060.474858] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba32e9d2-61ff-4470-90d5-ba60d631061e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.493739] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1060.493739] env[61978]: value = "task-1395256" [ 1060.493739] env[61978]: _type = "Task" [ 1060.493739] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.501729] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395256, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.529602] env[61978]: DEBUG oslo_vmware.api [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395255, 'name': PowerOffVM_Task, 'duration_secs': 0.188819} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.529854] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1060.530268] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1060.530784] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cb7d44f9-fde9-47b5-ab14-e5461969b27a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.594779] env[61978]: DEBUG nova.network.neutron [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Successfully created port: 6d7b8d71-cb74-42e7-a945-feaf0769d81e {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1060.624844] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1060.625446] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1060.625654] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Deleting the datastore file [datastore2] cb004a19-0048-4766-af7c-0fbde867f422 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1060.625974] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b4d5401-6e36-4c3c-9aa7-07ba77aadc29 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.635012] env[61978]: DEBUG oslo_vmware.api [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 1060.635012] env[61978]: value = "task-1395258" [ 1060.635012] env[61978]: _type = "Task" [ 1060.635012] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.643929] env[61978]: DEBUG oslo_vmware.api [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395258, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.711286] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e1911f-4f3d-5caa-d395-c04c94805f23, 'name': SearchDatastore_Task, 'duration_secs': 0.018789} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.711727] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1060.711918] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1060.712184] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1060.712339] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.712518] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1060.712858] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-28be946e-cf24-40fd-b768-59a0f361e13a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.728100] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1060.728301] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1060.729050] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f497be9-9971-4133-bdd1-5c08dc86e243 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.736452] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1060.736452] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52133579-f875-9fac-b816-f9a55766e787" [ 1060.736452] env[61978]: _type = "Task" [ 1060.736452] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.742251] env[61978]: DEBUG nova.compute.manager [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1060.751959] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52133579-f875-9fac-b816-f9a55766e787, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.869037] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395253, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.619843} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.869891] env[61978]: INFO nova.virt.vmwareapi.ds_util [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 0cdff646-34ad-49d5-b775-28e8e7ce778e/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk. [ 1060.870704] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8836fcc-10da-4945-925b-08d5cf2eff7d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.884031] env[61978]: DEBUG nova.network.neutron [req-4154096a-5fe0-4fe8-8450-ad05e25c9541 req-aa7e8558-ec67-4bbe-b090-4227c6372532 service nova] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Updated VIF entry in instance network info cache for port 141945f2-7f26-43d2-8d56-5880f14da310. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1060.885055] env[61978]: DEBUG nova.network.neutron [req-4154096a-5fe0-4fe8-8450-ad05e25c9541 req-aa7e8558-ec67-4bbe-b090-4227c6372532 service nova] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Updating instance_info_cache with network_info: [{"id": "141945f2-7f26-43d2-8d56-5880f14da310", "address": "fa:16:3e:c2:b7:37", "network": {"id": "f5fa8c48-5b4f-4e9a-a2c2-4bbe15fe743e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1900032982-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b106bb3dbeb4bc9a4fc832c860a559d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap141945f2-7f", "ovs_interfaceid": "141945f2-7f26-43d2-8d56-5880f14da310", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.905741] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 0cdff646-34ad-49d5-b775-28e8e7ce778e/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1060.907083] env[61978]: DEBUG oslo_concurrency.lockutils [req-4154096a-5fe0-4fe8-8450-ad05e25c9541 req-aa7e8558-ec67-4bbe-b090-4227c6372532 service nova] Releasing lock "refresh_cache-7e6178cf-b7be-46f8-8f8c-8605a09703c7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1060.907586] env[61978]: DEBUG nova.compute.manager [req-4154096a-5fe0-4fe8-8450-ad05e25c9541 req-aa7e8558-ec67-4bbe-b090-4227c6372532 service nova] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Received event network-vif-deleted-fcd64700-31ef-4310-8986-b22e515b1c55 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1060.910151] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9841add-5216-4741-8630-283bdb42d5f1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.930401] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1060.930401] env[61978]: value = "task-1395259" [ 1060.930401] env[61978]: _type = "Task" [ 1060.930401] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.943626] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395259, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.008097] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395256, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.146628] env[61978]: DEBUG oslo_vmware.api [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395258, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.251706] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52133579-f875-9fac-b816-f9a55766e787, 'name': SearchDatastore_Task, 'duration_secs': 0.047072} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.252892] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00f3efe9-36b0-4f5e-a4db-30ffb975869c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.261986] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1061.261986] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]528da3d5-b662-5caf-7f5e-73c1dc809f44" [ 1061.261986] env[61978]: _type = "Task" [ 1061.261986] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.270578] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528da3d5-b662-5caf-7f5e-73c1dc809f44, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.362712] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df812d10-f5c8-4bc1-b76b-53c3efec671b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.371800] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5a416e-efa0-4945-b90f-2cda9983468c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.405870] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bace4f1-d2e4-44d7-a679-6e3323a7b597 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.413505] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9e9714-642a-44a7-b653-6de43e710271 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.430137] env[61978]: DEBUG nova.compute.provider_tree [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1061.438551] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395259, 'name': ReconfigVM_Task, 'duration_secs': 0.481578} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.438948] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 0cdff646-34ad-49d5-b775-28e8e7ce778e/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1061.439844] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0f9f77-4ffc-4678-872a-20602639862f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.465334] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dcacd111-df5a-46dd-8c61-c5e1744a718a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.482383] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1061.482383] env[61978]: value = "task-1395260" [ 1061.482383] env[61978]: _type = "Task" [ 1061.482383] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.494571] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395260, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.503079] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395256, 'name': ReconfigVM_Task, 'duration_secs': 0.909418} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.503444] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Reconfigured VM instance instance-0000003c to attach disk [datastore2] f3a9f204-e4ed-49f1-85ef-8cea7377cf89/f3a9f204-e4ed-49f1-85ef-8cea7377cf89.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1061.504126] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cab8b75c-9288-4b60-a259-b9567c0e08d1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.515033] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1061.515033] env[61978]: value = "task-1395261" [ 1061.515033] env[61978]: _type = "Task" [ 1061.515033] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.522972] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395261, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.648681] env[61978]: DEBUG oslo_vmware.api [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395258, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.647551} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.648989] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1061.649395] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1061.649522] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1061.649616] env[61978]: INFO nova.compute.manager [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1061.649880] env[61978]: DEBUG oslo.service.loopingcall [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1061.650096] env[61978]: DEBUG nova.compute.manager [-] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1061.650217] env[61978]: DEBUG nova.network.neutron [-] [instance: cb004a19-0048-4766-af7c-0fbde867f422] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1061.766089] env[61978]: DEBUG nova.compute.manager [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1061.784593] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528da3d5-b662-5caf-7f5e-73c1dc809f44, 'name': SearchDatastore_Task, 'duration_secs': 0.013056} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.784593] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1061.784917] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 7e6178cf-b7be-46f8-8f8c-8605a09703c7/7e6178cf-b7be-46f8-8f8c-8605a09703c7.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1061.784917] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-18534804-4fe8-4b43-bd79-8c080fe7c07d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.794680] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1061.794680] env[61978]: value = "task-1395262" [ 1061.794680] env[61978]: _type = "Task" [ 1061.794680] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.805580] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395262, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.808235] env[61978]: DEBUG nova.virt.hardware [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1061.808235] env[61978]: DEBUG nova.virt.hardware [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1061.808235] env[61978]: DEBUG nova.virt.hardware [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1061.808402] env[61978]: DEBUG nova.virt.hardware [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1061.810132] env[61978]: DEBUG nova.virt.hardware [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1061.810132] env[61978]: DEBUG nova.virt.hardware [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1061.810132] env[61978]: DEBUG nova.virt.hardware [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1061.810132] env[61978]: DEBUG nova.virt.hardware [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1061.810132] env[61978]: DEBUG nova.virt.hardware [None 
req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1061.810132] env[61978]: DEBUG nova.virt.hardware [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1061.810132] env[61978]: DEBUG nova.virt.hardware [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1061.810569] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ff9817-f35f-4e07-9446-7bf623d8e424 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.821176] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c0c2a03-6bac-4425-be9c-cbd429e636e4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.935081] env[61978]: DEBUG nova.scheduler.client.report [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1061.995918] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395260, 'name': ReconfigVM_Task, 'duration_secs': 0.273525} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.996295] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1061.996595] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-66392444-2704-45d2-88b4-04583994bb1b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.004683] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1062.004683] env[61978]: value = "task-1395263" [ 1062.004683] env[61978]: _type = "Task" [ 1062.004683] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.015028] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395263, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.025317] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395261, 'name': Rename_Task, 'duration_secs': 0.198664} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.025642] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1062.025899] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81e7ea7a-24eb-417f-ab2c-5e3618ed9f0a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.034151] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1062.034151] env[61978]: value = "task-1395264" [ 1062.034151] env[61978]: _type = "Task" [ 1062.034151] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.047774] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395264, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.123237] env[61978]: DEBUG nova.compute.manager [req-76d42cde-301e-47d4-8f8d-a0b2755a8ed0 req-eaedf4e5-57b3-439d-be32-efcff54c60f3 service nova] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Received event network-vif-deleted-d9bef177-ead0-4f65-8781-806d6a34ea1d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1062.123500] env[61978]: INFO nova.compute.manager [req-76d42cde-301e-47d4-8f8d-a0b2755a8ed0 req-eaedf4e5-57b3-439d-be32-efcff54c60f3 service nova] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Neutron deleted interface d9bef177-ead0-4f65-8781-806d6a34ea1d; detaching it from the instance and deleting it from the info cache [ 1062.123735] env[61978]: DEBUG nova.network.neutron [req-76d42cde-301e-47d4-8f8d-a0b2755a8ed0 req-eaedf4e5-57b3-439d-be32-efcff54c60f3 service nova] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.309495] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395262, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.325840] env[61978]: DEBUG nova.network.neutron [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Successfully updated port: 6d7b8d71-cb74-42e7-a945-feaf0769d81e {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1062.433096] env[61978]: DEBUG nova.network.neutron [-] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.446022] env[61978]: DEBUG oslo_concurrency.lockutils [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.714s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.446022] env[61978]: DEBUG nova.compute.manager [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1062.449380] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.717s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.452840] env[61978]: INFO nova.compute.claims [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1062.515410] env[61978]: DEBUG oslo_vmware.api [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395263, 'name': PowerOnVM_Task, 'duration_secs': 0.474795} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.515410] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1062.517932] env[61978]: DEBUG nova.compute.manager [None req-0e0a0efa-4aa6-4a50-8e92-11218a91f61d tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1062.519627] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37a5298-f4bf-4f02-aa97-2d36d446a24a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.544574] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395264, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.631607] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fe9d101a-2825-4b18-8f1c-93063b2149b7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.641374] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-123bc0d5-9fa8-4a0d-8f4c-1c748c725dcd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.680336] env[61978]: DEBUG nova.compute.manager [req-76d42cde-301e-47d4-8f8d-a0b2755a8ed0 req-eaedf4e5-57b3-439d-be32-efcff54c60f3 service nova] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Detach interface failed, port_id=d9bef177-ead0-4f65-8781-806d6a34ea1d, reason: Instance cb004a19-0048-4766-af7c-0fbde867f422 could not be found. 
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1062.806470] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395262, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539179} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.807504] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 7e6178cf-b7be-46f8-8f8c-8605a09703c7/7e6178cf-b7be-46f8-8f8c-8605a09703c7.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1062.807504] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1062.807504] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cc009492-cf91-4957-8896-76958e6af658 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.814749] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1062.814749] env[61978]: value = "task-1395265" [ 1062.814749] env[61978]: _type = "Task" [ 1062.814749] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.823095] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395265, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.828413] env[61978]: DEBUG oslo_concurrency.lockutils [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "refresh_cache-a0ea73d1-a613-4403-8527-a8b81a619adf" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1062.828477] env[61978]: DEBUG oslo_concurrency.lockutils [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "refresh_cache-a0ea73d1-a613-4403-8527-a8b81a619adf" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.828621] env[61978]: DEBUG nova.network.neutron [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1062.936680] env[61978]: INFO nova.compute.manager [-] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Took 1.29 seconds to deallocate network for instance. [ 1062.960216] env[61978]: DEBUG nova.compute.utils [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1062.962577] env[61978]: DEBUG nova.compute.manager [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Not allocating networking since 'none' was specified. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1063.046961] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395264, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.099189] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "17c56c1c-9992-4559-ad23-c68909ae6792" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.099189] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "17c56c1c-9992-4559-ad23-c68909ae6792" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.333089] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395265, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.180659} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.333089] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1063.333407] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bfdc4e0-ea42-43b1-b1b9-89e1d3653b6a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.357209] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] 7e6178cf-b7be-46f8-8f8c-8605a09703c7/7e6178cf-b7be-46f8-8f8c-8605a09703c7.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1063.357386] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f6ce578-bda7-4211-81ac-5b18ef4042e8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.379155] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1063.379155] env[61978]: value = "task-1395266" [ 1063.379155] env[61978]: _type = "Task" [ 1063.379155] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.389742] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395266, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.390814] env[61978]: DEBUG nova.network.neutron [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1063.447052] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.463776] env[61978]: DEBUG nova.compute.manager [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1063.549531] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395264, 'name': PowerOnVM_Task, 'duration_secs': 1.065111} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.549531] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1063.549994] env[61978]: INFO nova.compute.manager [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Took 9.07 seconds to spawn the instance on the hypervisor. 
[ 1063.549994] env[61978]: DEBUG nova.compute.manager [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1063.553020] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c1d8d40-c353-4643-9e8e-ecc6740ddae6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.572825] env[61978]: DEBUG nova.network.neutron [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Updating instance_info_cache with network_info: [{"id": "6d7b8d71-cb74-42e7-a945-feaf0769d81e", "address": "fa:16:3e:2b:b3:f1", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d7b8d71-cb", "ovs_interfaceid": "6d7b8d71-cb74-42e7-a945-feaf0769d81e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.600972] env[61978]: DEBUG nova.compute.manager [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1063.888626] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395266, 'name': ReconfigVM_Task, 'duration_secs': 0.347303} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.890875] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Reconfigured VM instance instance-0000003d to attach disk [datastore2] 7e6178cf-b7be-46f8-8f8c-8605a09703c7/7e6178cf-b7be-46f8-8f8c-8605a09703c7.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1063.891858] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bcb73eca-1e8f-4550-a270-a47983f143ec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.900079] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1063.900079] env[61978]: value = "task-1395267" [ 1063.900079] env[61978]: _type = "Task" [ 1063.900079] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.912368] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395267, 'name': Rename_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.923247] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquiring lock "cf6d8815-ed87-4629-9df9-6f406ac2fe6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.923984] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lock "cf6d8815-ed87-4629-9df9-6f406ac2fe6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.953908] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c151c753-f6ab-4324-aa9e-dc270f3db09f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.962574] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2968e9c5-046f-456a-8541-5f1e225f92bf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.998033] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eedc80b-d7f9-4e5a-bd79-ef636ef78a04 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.006612] env[61978]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0820a77a-879b-492b-b486-afa6e2a2cc7c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.019964] env[61978]: DEBUG nova.compute.provider_tree [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1064.073968] env[61978]: INFO nova.compute.manager [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Took 19.50 seconds to build instance. [ 1064.080334] env[61978]: DEBUG oslo_concurrency.lockutils [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "refresh_cache-a0ea73d1-a613-4403-8527-a8b81a619adf" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1064.080556] env[61978]: DEBUG nova.compute.manager [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Instance network_info: |[{"id": "6d7b8d71-cb74-42e7-a945-feaf0769d81e", "address": "fa:16:3e:2b:b3:f1", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d7b8d71-cb", "ovs_interfaceid": "6d7b8d71-cb74-42e7-a945-feaf0769d81e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1064.081660] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:b3:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ed3ffc1d-9f86-4029-857e-6cd1d383edbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d7b8d71-cb74-42e7-a945-feaf0769d81e', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1064.094300] env[61978]: DEBUG oslo.service.loopingcall [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 
tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1064.094853] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1064.094853] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9aeee1a1-947c-4e89-8497-697166dfcb91 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.120056] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1064.120056] env[61978]: value = "task-1395268" [ 1064.120056] env[61978]: _type = "Task" [ 1064.120056] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.129340] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395268, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.130416] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1064.149527] env[61978]: DEBUG nova.compute.manager [req-5bd162b8-0aba-413d-9ac2-3e8417852dc1 req-c12c9ab1-b889-4e3a-8303-02d2c9d0e936 service nova] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Received event network-vif-plugged-6d7b8d71-cb74-42e7-a945-feaf0769d81e {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1064.149785] env[61978]: DEBUG oslo_concurrency.lockutils [req-5bd162b8-0aba-413d-9ac2-3e8417852dc1 req-c12c9ab1-b889-4e3a-8303-02d2c9d0e936 service nova] Acquiring lock "a0ea73d1-a613-4403-8527-a8b81a619adf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1064.149961] env[61978]: DEBUG oslo_concurrency.lockutils [req-5bd162b8-0aba-413d-9ac2-3e8417852dc1 req-c12c9ab1-b889-4e3a-8303-02d2c9d0e936 service nova] Lock "a0ea73d1-a613-4403-8527-a8b81a619adf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.150232] env[61978]: DEBUG oslo_concurrency.lockutils [req-5bd162b8-0aba-413d-9ac2-3e8417852dc1 req-c12c9ab1-b889-4e3a-8303-02d2c9d0e936 service nova] Lock "a0ea73d1-a613-4403-8527-a8b81a619adf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.150498] env[61978]: DEBUG nova.compute.manager [req-5bd162b8-0aba-413d-9ac2-3e8417852dc1 req-c12c9ab1-b889-4e3a-8303-02d2c9d0e936 service nova] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] No waiting events found dispatching 
network-vif-plugged-6d7b8d71-cb74-42e7-a945-feaf0769d81e {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1064.150702] env[61978]: WARNING nova.compute.manager [req-5bd162b8-0aba-413d-9ac2-3e8417852dc1 req-c12c9ab1-b889-4e3a-8303-02d2c9d0e936 service nova] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Received unexpected event network-vif-plugged-6d7b8d71-cb74-42e7-a945-feaf0769d81e for instance with vm_state building and task_state spawning. [ 1064.150872] env[61978]: DEBUG nova.compute.manager [req-5bd162b8-0aba-413d-9ac2-3e8417852dc1 req-c12c9ab1-b889-4e3a-8303-02d2c9d0e936 service nova] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Received event network-changed-6d7b8d71-cb74-42e7-a945-feaf0769d81e {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1064.151040] env[61978]: DEBUG nova.compute.manager [req-5bd162b8-0aba-413d-9ac2-3e8417852dc1 req-c12c9ab1-b889-4e3a-8303-02d2c9d0e936 service nova] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Refreshing instance network info cache due to event network-changed-6d7b8d71-cb74-42e7-a945-feaf0769d81e. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1064.151250] env[61978]: DEBUG oslo_concurrency.lockutils [req-5bd162b8-0aba-413d-9ac2-3e8417852dc1 req-c12c9ab1-b889-4e3a-8303-02d2c9d0e936 service nova] Acquiring lock "refresh_cache-a0ea73d1-a613-4403-8527-a8b81a619adf" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1064.151408] env[61978]: DEBUG oslo_concurrency.lockutils [req-5bd162b8-0aba-413d-9ac2-3e8417852dc1 req-c12c9ab1-b889-4e3a-8303-02d2c9d0e936 service nova] Acquired lock "refresh_cache-a0ea73d1-a613-4403-8527-a8b81a619adf" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.151694] env[61978]: DEBUG nova.network.neutron [req-5bd162b8-0aba-413d-9ac2-3e8417852dc1 req-c12c9ab1-b889-4e3a-8303-02d2c9d0e936 service nova] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Refreshing network info cache for port 6d7b8d71-cb74-42e7-a945-feaf0769d81e {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1064.412036] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395267, 'name': Rename_Task, 'duration_secs': 0.138496} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.412774] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1064.413822] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1720e6aa-0eaa-42bb-adfd-7c6c60957598 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.420043] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1064.420043] env[61978]: value = "task-1395269" [ 1064.420043] env[61978]: _type = "Task" [ 1064.420043] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.428407] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395269, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.428762] env[61978]: DEBUG nova.compute.manager [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1064.502916] env[61978]: DEBUG nova.compute.manager [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1064.522859] env[61978]: DEBUG nova.scheduler.client.report [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1064.536807] env[61978]: DEBUG nova.virt.hardware [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1064.537414] env[61978]: DEBUG nova.virt.hardware [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1064.537577] env[61978]: DEBUG nova.virt.hardware [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1064.537906] env[61978]: DEBUG nova.virt.hardware [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1064.539030] env[61978]: DEBUG nova.virt.hardware [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1064.539030] env[61978]: DEBUG nova.virt.hardware [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1064.539030] env[61978]: DEBUG nova.virt.hardware [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 
tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1064.539269] env[61978]: DEBUG nova.virt.hardware [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1064.539384] env[61978]: DEBUG nova.virt.hardware [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1064.539629] env[61978]: DEBUG nova.virt.hardware [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1064.539937] env[61978]: DEBUG nova.virt.hardware [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1064.541849] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32207969-085c-432e-a6d4-170534363cf9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.555327] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4baae5b7-b5bc-415b-a148-0f150589e221 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.579023] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "f3a9f204-e4ed-49f1-85ef-8cea7377cf89" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.028s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.579750] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Instance VIF info [] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1064.589502] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Creating folder: Project (f9385256494446afada750a46761ec68). Parent ref: group-v295764. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1064.590096] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f1c1c759-2007-4154-b644-4b01b6477bf9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.604337] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Created folder: Project (f9385256494446afada750a46761ec68) in parent group-v295764. [ 1064.604597] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Creating folder: Instances. Parent ref: group-v295937. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1064.604843] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-532b3636-1848-4a55-b248-a9912571948b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.615200] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Created folder: Instances in parent group-v295937. [ 1064.616781] env[61978]: DEBUG oslo.service.loopingcall [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1064.617157] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1064.617516] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72278376-759b-453d-aa76-52a56b4ed0a0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.652662] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395268, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.657785] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1064.657785] env[61978]: value = "task-1395272" [ 1064.657785] env[61978]: _type = "Task" [ 1064.657785] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.671125] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395272, 'name': CreateVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.909078] env[61978]: DEBUG nova.network.neutron [req-5bd162b8-0aba-413d-9ac2-3e8417852dc1 req-c12c9ab1-b889-4e3a-8303-02d2c9d0e936 service nova] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Updated VIF entry in instance network info cache for port 6d7b8d71-cb74-42e7-a945-feaf0769d81e. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1064.909078] env[61978]: DEBUG nova.network.neutron [req-5bd162b8-0aba-413d-9ac2-3e8417852dc1 req-c12c9ab1-b889-4e3a-8303-02d2c9d0e936 service nova] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Updating instance_info_cache with network_info: [{"id": "6d7b8d71-cb74-42e7-a945-feaf0769d81e", "address": "fa:16:3e:2b:b3:f1", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d7b8d71-cb", "ovs_interfaceid": "6d7b8d71-cb74-42e7-a945-feaf0769d81e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1064.931919] env[61978]: DEBUG oslo_vmware.api [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395269, 'name': PowerOnVM_Task, 'duration_secs': 0.482184} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.934963] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1064.934963] env[61978]: INFO nova.compute.manager [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Took 7.83 seconds to spawn the instance on the hypervisor. 
[ 1064.935252] env[61978]: DEBUG nova.compute.manager [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1064.938196] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2440a9a7-0a5d-4928-867f-9e4b90d7b401 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.955786] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1065.031039] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.581s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1065.031712] env[61978]: DEBUG nova.compute.manager [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1065.034795] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 16.073s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1065.035011] env[61978]: DEBUG nova.objects.instance [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61978) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1065.150028] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395268, 'name': CreateVM_Task, 'duration_secs': 0.543203} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.150028] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1065.150313] env[61978]: DEBUG oslo_concurrency.lockutils [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1065.150489] env[61978]: DEBUG oslo_concurrency.lockutils [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.150824] env[61978]: DEBUG oslo_concurrency.lockutils [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1065.151137] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6199fd5c-fa9f-4fcd-a774-1917001d0a9f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.156016] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1065.156016] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5274474a-09a5-0fff-3dcf-3eba1a3a5866" [ 1065.156016] env[61978]: _type = "Task" [ 1065.156016] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.168732] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5274474a-09a5-0fff-3dcf-3eba1a3a5866, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.173693] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395272, 'name': CreateVM_Task, 'duration_secs': 0.335355} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.173939] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1065.174495] env[61978]: DEBUG oslo_concurrency.lockutils [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1065.411442] env[61978]: DEBUG oslo_concurrency.lockutils [req-5bd162b8-0aba-413d-9ac2-3e8417852dc1 req-c12c9ab1-b889-4e3a-8303-02d2c9d0e936 service nova] Releasing lock "refresh_cache-a0ea73d1-a613-4403-8527-a8b81a619adf" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1065.456061] env[61978]: INFO nova.compute.manager [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Took 20.85 seconds to build instance. [ 1065.540859] env[61978]: DEBUG nova.compute.utils [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1065.549418] env[61978]: DEBUG nova.compute.manager [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Not allocating networking since 'none' was specified. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1065.668841] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5274474a-09a5-0fff-3dcf-3eba1a3a5866, 'name': SearchDatastore_Task, 'duration_secs': 0.043863} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.668986] env[61978]: DEBUG oslo_concurrency.lockutils [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1065.669331] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1065.669547] env[61978]: DEBUG oslo_concurrency.lockutils [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1065.669716] env[61978]: DEBUG oslo_concurrency.lockutils [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.669902] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1065.670215] env[61978]: DEBUG oslo_concurrency.lockutils [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.670539] env[61978]: DEBUG oslo_concurrency.lockutils [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1065.670883] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80e20730-a6fe-4aeb-a6e7-ea4cd7e14ad1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.672886] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b66e7fde-d51c-4853-b3b0-aa2da08d9965 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.678440] env[61978]: DEBUG oslo_vmware.api [None 
req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1065.678440] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]527c7a3d-6f92-ce01-58d8-1b1e12935cb4" [ 1065.678440] env[61978]: _type = "Task" [ 1065.678440] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.688514] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]527c7a3d-6f92-ce01-58d8-1b1e12935cb4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.689671] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1065.689856] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1065.690615] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b256fc0b-6d6a-43e5-ba87-5e4f6b19987e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.695660] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1065.695660] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]520941b8-23b8-5ec8-c97d-80d771404294" [ 1065.695660] env[61978]: _type = "Task" [ 1065.695660] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.705119] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520941b8-23b8-5ec8-c97d-80d771404294, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.958155] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1d645a5-2ed7-4278-8b5f-cae50690e6db tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "7e6178cf-b7be-46f8-8f8c-8605a09703c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.366s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1066.050452] env[61978]: DEBUG nova.compute.manager [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1066.053772] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e93e87b3-aa40-42fa-8d0d-5b896f39b389 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1066.054848] env[61978]: DEBUG oslo_concurrency.lockutils [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.459s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1066.055149] env[61978]: DEBUG nova.objects.instance [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lazy-loading 'resources' on Instance uuid 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.190178] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]527c7a3d-6f92-ce01-58d8-1b1e12935cb4, 'name': SearchDatastore_Task, 'duration_secs': 0.026249} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.190533] env[61978]: DEBUG oslo_concurrency.lockutils [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1066.190776] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1066.190995] env[61978]: DEBUG oslo_concurrency.lockutils [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1066.205088] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520941b8-23b8-5ec8-c97d-80d771404294, 'name': SearchDatastore_Task, 'duration_secs': 0.02376} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.205918] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee4e41d8-e1c4-409f-8601-e011af5d8051 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.211033] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1066.211033] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52488a1c-ece3-6aec-e5ee-eaef2ec67845" [ 1066.211033] env[61978]: _type = "Task" [ 1066.211033] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.219478] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52488a1c-ece3-6aec-e5ee-eaef2ec67845, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.724532] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52488a1c-ece3-6aec-e5ee-eaef2ec67845, 'name': SearchDatastore_Task, 'duration_secs': 0.042124} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.729486] env[61978]: DEBUG oslo_concurrency.lockutils [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1066.730076] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] a0ea73d1-a613-4403-8527-a8b81a619adf/a0ea73d1-a613-4403-8527-a8b81a619adf.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1066.730913] env[61978]: DEBUG oslo_concurrency.lockutils [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.731378] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1066.731834] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4298800-0ee0-41eb-963e-14134f3f0ddf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.735180] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88b5329b-8d90-44e4-922c-d287d7ee3b4d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.743605] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1066.743605] env[61978]: value = "task-1395273" [ 1066.743605] env[61978]: _type = "Task" [ 1066.743605] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.748308] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1066.748507] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1066.751253] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1aeeae43-5a30-4bdb-9e84-74514585a6c4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.759546] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395273, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.763258] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1066.763258] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52eb600b-c237-5f90-f220-a776bba7ac0e" [ 1066.763258] env[61978]: _type = "Task" [ 1066.763258] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.771035] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52eb600b-c237-5f90-f220-a776bba7ac0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.790515] env[61978]: DEBUG oslo_concurrency.lockutils [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "f3a9f204-e4ed-49f1-85ef-8cea7377cf89" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.790668] env[61978]: DEBUG oslo_concurrency.lockutils [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "f3a9f204-e4ed-49f1-85ef-8cea7377cf89" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1066.790870] env[61978]: DEBUG oslo_concurrency.lockutils [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "f3a9f204-e4ed-49f1-85ef-8cea7377cf89-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.791066] env[61978]: DEBUG oslo_concurrency.lockutils [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "f3a9f204-e4ed-49f1-85ef-8cea7377cf89-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1066.791249] 
env[61978]: DEBUG oslo_concurrency.lockutils [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "f3a9f204-e4ed-49f1-85ef-8cea7377cf89-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1066.793519] env[61978]: INFO nova.compute.manager [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Terminating instance [ 1066.797462] env[61978]: DEBUG nova.compute.manager [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1066.797686] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1066.798923] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e635ab-89e5-4cb1-9d8a-1b59278079db {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.806179] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1066.806866] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89ca3739-49c6-480f-be4f-22a588bc4c15 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.812548] env[61978]: DEBUG oslo_vmware.api [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1066.812548] env[61978]: value = "task-1395274" [ 1066.812548] env[61978]: _type = "Task" [ 1066.812548] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.821013] env[61978]: DEBUG oslo_vmware.api [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395274, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.860665] env[61978]: DEBUG oslo_concurrency.lockutils [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "7e6178cf-b7be-46f8-8f8c-8605a09703c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.861085] env[61978]: DEBUG oslo_concurrency.lockutils [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "7e6178cf-b7be-46f8-8f8c-8605a09703c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1066.861338] env[61978]: DEBUG oslo_concurrency.lockutils [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "7e6178cf-b7be-46f8-8f8c-8605a09703c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.861555] env[61978]: DEBUG oslo_concurrency.lockutils [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "7e6178cf-b7be-46f8-8f8c-8605a09703c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1066.861779] env[61978]: DEBUG oslo_concurrency.lockutils [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "7e6178cf-b7be-46f8-8f8c-8605a09703c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1066.864513] env[61978]: INFO nova.compute.manager [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Terminating instance [ 1066.866516] env[61978]: DEBUG nova.compute.manager [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1066.866710] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1066.867598] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ee7711-eec1-4059-a1fd-7195129e6588 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.878629] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1066.878629] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-74a57b94-7d48-45e2-9db5-728f3040f331 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.884589] env[61978]: DEBUG oslo_vmware.api [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1066.884589] env[61978]: value = "task-1395275" [ 1066.884589] env[61978]: _type = "Task" [ 1066.884589] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.896814] env[61978]: DEBUG oslo_vmware.api [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395275, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.033305] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a362b7-0bad-459c-86ff-af58e2f6d319 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.042974] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078fbebc-5033-421d-b0f0-ae378bec38cc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.077060] env[61978]: DEBUG nova.compute.manager [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1067.080588] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8014f6e-7242-4f03-85b0-110b9fa8e900 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.090524] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f486927-6fd6-4e6a-a54a-0514df83b064 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.110666] env[61978]: DEBUG nova.compute.provider_tree [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1067.122044] env[61978]: DEBUG nova.virt.hardware [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1067.124041] env[61978]: DEBUG nova.virt.hardware [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1067.124041] env[61978]: DEBUG nova.virt.hardware [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1067.124041] env[61978]: DEBUG nova.virt.hardware [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1067.124041] env[61978]: DEBUG nova.virt.hardware [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1067.124041] env[61978]: DEBUG nova.virt.hardware [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1067.124041] env[61978]: DEBUG nova.virt.hardware [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1067.124041] env[61978]: DEBUG nova.virt.hardware [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1067.124041] env[61978]: DEBUG nova.virt.hardware [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1067.124041] env[61978]: DEBUG nova.virt.hardware [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1067.124041] env[61978]: DEBUG nova.virt.hardware [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1067.124935] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16bac857-5190-417e-9de6-012f7e234130 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.136801] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c443f17-0863-45ba-a0ea-b58a31ba6020 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.154496] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Instance VIF info [] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1067.160719] env[61978]: DEBUG oslo.service.loopingcall [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1067.160903] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1067.161042] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92eb38df-a375-4330-93df-9667e19a9853 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.182039] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1067.182039] env[61978]: value = "task-1395276" [ 1067.182039] env[61978]: _type = "Task" [ 1067.182039] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.191836] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395276, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.257035] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395273, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.273630] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52eb600b-c237-5f90-f220-a776bba7ac0e, 'name': SearchDatastore_Task, 'duration_secs': 0.008995} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.274630] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c853786-d446-442c-a674-97a80a1ccc9f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.279909] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1067.279909] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5248d5af-70ed-b0b2-7e71-ef26aa2e6100" [ 1067.279909] env[61978]: _type = "Task" [ 1067.279909] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.287466] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5248d5af-70ed-b0b2-7e71-ef26aa2e6100, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.322581] env[61978]: DEBUG oslo_vmware.api [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395274, 'name': PowerOffVM_Task, 'duration_secs': 0.445546} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.322973] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1067.323083] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1067.323291] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c22bfa8-6573-4b4f-9331-403737be9fde {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.397950] env[61978]: DEBUG oslo_vmware.api [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395275, 'name': PowerOffVM_Task, 'duration_secs': 0.24521} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.398277] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1067.398456] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1067.398718] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cea3a257-68d8-47c8-9a45-bfa5645ab4cb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.616839] env[61978]: DEBUG nova.scheduler.client.report [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1067.692224] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395276, 'name': CreateVM_Task, 'duration_secs': 0.354784} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.692397] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1067.693075] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1067.693324] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.693682] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1067.693995] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-801d7d9b-87e0-4a14-9cec-0402ce279ab9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.700159] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1067.700159] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b999a8-4139-d281-b746-1b8b15d6c4c0" [ 1067.700159] env[61978]: _type = "Task" [ 1067.700159] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.707807] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b999a8-4139-d281-b746-1b8b15d6c4c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.753526] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395273, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.54652} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.753854] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] a0ea73d1-a613-4403-8527-a8b81a619adf/a0ea73d1-a613-4403-8527-a8b81a619adf.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1067.754138] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1067.754439] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-02fc78f6-09e4-4179-a5d8-0a94ebb5058c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.761561] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1067.761561] env[61978]: value = "task-1395279" [ 1067.761561] env[61978]: _type = "Task" [ 1067.761561] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.768272] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395279, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.791288] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5248d5af-70ed-b0b2-7e71-ef26aa2e6100, 'name': SearchDatastore_Task, 'duration_secs': 0.014319} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.791595] env[61978]: DEBUG oslo_concurrency.lockutils [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1067.791877] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] b932d221-aca9-4853-aa9c-2d27981e878c/b932d221-aca9-4853-aa9c-2d27981e878c.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1067.792183] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f698bd1b-62c3-4280-a532-c637887c6596 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.798670] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1067.798670] env[61978]: value = "task-1395280" [ 1067.798670] env[61978]: _type = "Task" [ 1067.798670] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.806939] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395280, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.121739] env[61978]: DEBUG oslo_concurrency.lockutils [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.067s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1068.124465] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.968s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1068.126265] env[61978]: INFO nova.compute.claims [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1068.149774] env[61978]: INFO nova.scheduler.client.report [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Deleted allocations for instance 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5 [ 1068.214993] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b999a8-4139-d281-b746-1b8b15d6c4c0, 'name': SearchDatastore_Task, 'duration_secs': 0.009852} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.215411] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1068.215759] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1068.216176] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1068.216447] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.216736] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1068.217463] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af213213-14f3-495c-8053-318646e92031 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.229977] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1068.230393] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1068.231636] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0383e512-1792-4d43-a63e-dc215386cea5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.239620] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1068.239620] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52066424-1e90-d0e9-520a-2b47e47056b0" [ 1068.239620] env[61978]: _type = "Task" [ 1068.239620] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.251504] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52066424-1e90-d0e9-520a-2b47e47056b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.270347] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395279, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075577} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.270653] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1068.271882] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7dcd4b-c3a3-4609-8b75-883c27cec886 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.293557] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] a0ea73d1-a613-4403-8527-a8b81a619adf/a0ea73d1-a613-4403-8527-a8b81a619adf.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1068.293862] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d43360ec-9c0a-4efc-b534-c33252805b02 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.315931] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395280, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512342} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.317159] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] b932d221-aca9-4853-aa9c-2d27981e878c/b932d221-aca9-4853-aa9c-2d27981e878c.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1068.317379] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1068.317709] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1068.317709] env[61978]: value = "task-1395281" [ 1068.317709] env[61978]: _type = "Task" [ 1068.317709] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.317907] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a6e568a9-ff50-46b0-bdd2-7003c90a4770 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.326405] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1068.326405] env[61978]: value = "task-1395282" [ 1068.326405] env[61978]: _type = "Task" [ 1068.326405] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.329705] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395281, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.336941] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395282, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.404202] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1068.404581] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1068.404889] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Deleting the datastore file [datastore2] f3a9f204-e4ed-49f1-85ef-8cea7377cf89 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1068.406522] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fca7a92b-3b69-4b63-9c70-475a4604b7cc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.408649] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1068.408859] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1068.409122] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Deleting the datastore file [datastore2] 7e6178cf-b7be-46f8-8f8c-8605a09703c7 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1068.409383] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df31c7c6-df27-4db6-b4d0-aadb7d625e7b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.415327] env[61978]: DEBUG oslo_vmware.api [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1068.415327] env[61978]: value = "task-1395283" [ 1068.415327] env[61978]: _type = "Task" [ 1068.415327] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.417110] env[61978]: DEBUG oslo_vmware.api [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for the task: (returnval){ [ 1068.417110] env[61978]: value = "task-1395284" [ 1068.417110] env[61978]: _type = "Task" [ 1068.417110] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.427583] env[61978]: DEBUG oslo_vmware.api [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395283, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.430574] env[61978]: DEBUG oslo_vmware.api [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395284, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.659249] env[61978]: DEBUG oslo_concurrency.lockutils [None req-96853d41-fd28-43f2-aa5a-48156a1b6a85 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lock "5d9556d2-fcdd-416f-8f16-0fb271ff4ca5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.072s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1068.750169] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52066424-1e90-d0e9-520a-2b47e47056b0, 'name': SearchDatastore_Task, 'duration_secs': 0.037753} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.750948] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d4de798-0266-4101-8955-ff5db4eb6dbb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.757843] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1068.757843] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5242cd06-55f3-4e39-f9e5-9b130a44fb82" [ 1068.757843] env[61978]: _type = "Task" [ 1068.757843] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.765412] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5242cd06-55f3-4e39-f9e5-9b130a44fb82, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.828817] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395281, 'name': ReconfigVM_Task, 'duration_secs': 0.364781} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.829177] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Reconfigured VM instance instance-0000003e to attach disk [datastore2] a0ea73d1-a613-4403-8527-a8b81a619adf/a0ea73d1-a613-4403-8527-a8b81a619adf.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1068.829878] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8bbbc38c-eceb-4f75-8039-9dd28b7b8890 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.840194] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395282, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083803} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.841334] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1068.841670] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1068.841670] env[61978]: value = "task-1395285" [ 1068.841670] env[61978]: _type = "Task" [ 1068.841670] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.842350] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73f4ac8-f59c-45d0-8458-403918f8a36e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.852220] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395285, 'name': Rename_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.866925] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] b932d221-aca9-4853-aa9c-2d27981e878c/b932d221-aca9-4853-aa9c-2d27981e878c.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1068.867260] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb84d90b-b50b-4a7c-acb4-71922269ef65 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.887013] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1068.887013] env[61978]: value = "task-1395286" [ 1068.887013] env[61978]: _type = "Task" [ 1068.887013] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.896797] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395286, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.930773] env[61978]: DEBUG oslo_vmware.api [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395283, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.191663} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.933989] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1068.934325] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1068.934551] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1068.934744] env[61978]: INFO nova.compute.manager [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Took 2.14 seconds to destroy the instance on the hypervisor. 
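[editor's note] The DeleteDatastoreFile_Task / ReconfigVM_Task / ExtendVirtualDisk_Task records above all follow the same wait_for_task/_poll_task pattern: a vCenter task is submitted, then its progress is polled until it reports success or error. The following is a minimal illustrative sketch of that polling loop only; it is not the oslo.vmware implementation, and `start_task` / `get_task_info` are hypothetical stand-ins for the task-submission call and the property-collector poll seen in the log.

```python
# Illustrative sketch only -- NOT the oslo.vmware implementation.
# `start_task` and `get_task_info` are hypothetical callables standing in
# for a vCenter task submission (e.g. DeleteDatastoreFile_Task) and the
# poll that reads back its state and progress.
import time


def wait_for_task(start_task, get_task_info, poll_interval=0.5, timeout=300):
    """Submit a task and poll it until it finishes, mirroring the
    'Waiting for the task ... Task progress is N%' lines in the log."""
    task_ref = start_task()
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)  # e.g. {'state': 'running', 'progress': 10}
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(f"task {task_ref} failed: {info.get('error')}")
        # Report progress, then sleep before the next poll.
        print(f"Task {task_ref} progress is {info.get('progress', 0)}%.")
        time.sleep(poll_interval)
    raise TimeoutError(f"task {task_ref} did not complete within {timeout}s")
```

[end editor's note]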
[ 1068.934991] env[61978]: DEBUG oslo.service.loopingcall [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1068.935222] env[61978]: DEBUG oslo_vmware.api [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Task: {'id': task-1395284, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201723} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.935615] env[61978]: DEBUG nova.compute.manager [-] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1068.935737] env[61978]: DEBUG nova.network.neutron [-] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1068.937401] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1068.937594] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1068.937777] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1068.937955] env[61978]: INFO nova.compute.manager [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Took 2.07 seconds to destroy the instance on the hypervisor. [ 1068.938219] env[61978]: DEBUG oslo.service.loopingcall [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1068.938715] env[61978]: DEBUG nova.compute.manager [-] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1068.938797] env[61978]: DEBUG nova.network.neutron [-] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1069.271743] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5242cd06-55f3-4e39-f9e5-9b130a44fb82, 'name': SearchDatastore_Task, 'duration_secs': 0.009525} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.275929] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1069.276444] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] f4034944-3a9d-4e14-a545-0bf574465e0b/f4034944-3a9d-4e14-a545-0bf574465e0b.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1069.277095] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3d1adc9-e7c9-4a54-995f-0086230798b5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.285622] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1069.285622] env[61978]: value = "task-1395287" [ 1069.285622] env[61978]: _type = "Task" [ 1069.285622] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.300023] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395287, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.358034] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395285, 'name': Rename_Task, 'duration_secs': 0.144374} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.358034] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1069.360112] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-09e3722d-ac33-4df2-bc2f-d215a3e237b2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.366301] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1069.366301] env[61978]: value = "task-1395288" [ 1069.366301] env[61978]: _type = "Task" [ 1069.366301] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.382321] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395288, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.401831] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395286, 'name': ReconfigVM_Task, 'duration_secs': 0.26962} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.401831] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Reconfigured VM instance instance-0000003f to attach disk [datastore2] b932d221-aca9-4853-aa9c-2d27981e878c/b932d221-aca9-4853-aa9c-2d27981e878c.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1069.401831] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7ed1436-7707-47f3-8ef6-777dcfa3dbc6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.408918] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1069.408918] env[61978]: value = "task-1395289" [ 1069.408918] env[61978]: _type = "Task" [ 1069.408918] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.427584] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395289, 'name': Rename_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.439572] env[61978]: DEBUG nova.compute.manager [req-b2990a36-d562-49b7-b8db-7cd52c9808d5 req-dc3e27b7-7aed-49c1-856b-246049777f37 service nova] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Received event network-vif-deleted-3e20714f-6e99-445a-af4a-2d7a05b46f72 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1069.439796] env[61978]: INFO nova.compute.manager [req-b2990a36-d562-49b7-b8db-7cd52c9808d5 req-dc3e27b7-7aed-49c1-856b-246049777f37 service nova] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Neutron deleted interface 3e20714f-6e99-445a-af4a-2d7a05b46f72; detaching it from the instance and deleting it from the info cache [ 1069.440073] env[61978]: DEBUG nova.network.neutron [req-b2990a36-d562-49b7-b8db-7cd52c9808d5 req-dc3e27b7-7aed-49c1-856b-246049777f37 service nova] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.670478] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99ba28f-720c-4bdb-9a56-207661e79333 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.683406] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c23dea-4c87-4d51-8032-cf43b29aa705 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.723925] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e9b631-8172-4fb9-a889-0629ebf90bec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.736563] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13bdab35-d571-4d1f-b53b-7ffde717295d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.753968] env[61978]: DEBUG nova.compute.provider_tree [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1069.801152] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395287, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.887026] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395288, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.918526] env[61978]: DEBUG nova.network.neutron [-] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.922302] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395289, 'name': Rename_Task, 'duration_secs': 0.164023} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.922913] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1069.923123] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b42cf238-7116-42d1-aaf8-402977eca1af {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.930597] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1069.930597] env[61978]: value = "task-1395290" [ 1069.930597] env[61978]: _type = "Task" [ 1069.930597] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.942149] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395290, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.946033] env[61978]: DEBUG nova.network.neutron [-] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.947761] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a72e7f86-f50a-4240-8732-eb0fca76688c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.959914] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f19f4e1-8d97-4733-87dd-703c541609b7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.996805] env[61978]: DEBUG nova.compute.manager [req-b2990a36-d562-49b7-b8db-7cd52c9808d5 req-dc3e27b7-7aed-49c1-856b-246049777f37 service nova] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Detach interface failed, port_id=3e20714f-6e99-445a-af4a-2d7a05b46f72, reason: Instance f3a9f204-e4ed-49f1-85ef-8cea7377cf89 could not be found. 
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1070.260922] env[61978]: DEBUG nova.scheduler.client.report [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1070.296730] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395287, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.600613} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.297031] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] f4034944-3a9d-4e14-a545-0bf574465e0b/f4034944-3a9d-4e14-a545-0bf574465e0b.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1070.297236] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1070.297496] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-191150f7-a150-4f21-a4d5-ede821a751ce {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.306027] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1070.306027] env[61978]: value = "task-1395291" [ 1070.306027] env[61978]: _type = "Task" [ 1070.306027] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.313295] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395291, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.376306] env[61978]: DEBUG oslo_vmware.api [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395288, 'name': PowerOnVM_Task, 'duration_secs': 0.798466} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.376613] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1070.376794] env[61978]: INFO nova.compute.manager [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Took 8.61 seconds to spawn the instance on the hypervisor. [ 1070.376980] env[61978]: DEBUG nova.compute.manager [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1070.377870] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cfa7cd8-8006-4b7e-848d-350ed3e0c8a0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.424324] env[61978]: INFO nova.compute.manager [-] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Took 1.49 seconds to deallocate network for instance. [ 1070.439822] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395290, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.448811] env[61978]: INFO nova.compute.manager [-] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Took 1.51 seconds to deallocate network for instance. [ 1070.770023] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.643s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.770023] env[61978]: DEBUG nova.compute.manager [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1070.770940] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.373s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.771321] env[61978]: DEBUG nova.objects.instance [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lazy-loading 'resources' on Instance uuid 0d48ae5d-7cc8-42b3-a993-44636e9cb171 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1070.815347] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395291, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105816} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.815915] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1070.816794] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677afd41-c287-4688-aa35-40250bff7a2c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.836699] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] f4034944-3a9d-4e14-a545-0bf574465e0b/f4034944-3a9d-4e14-a545-0bf574465e0b.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1070.837192] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38a6313f-6b0b-4176-99c5-8166302fb511 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.857374] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1070.857374] env[61978]: value = "task-1395292" [ 1070.857374] env[61978]: _type = "Task" [ 1070.857374] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.865852] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395292, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.894660] env[61978]: INFO nova.compute.manager [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Took 23.90 seconds to build instance. [ 1070.933019] env[61978]: DEBUG oslo_concurrency.lockutils [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.945537] env[61978]: DEBUG oslo_vmware.api [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395290, 'name': PowerOnVM_Task, 'duration_secs': 0.815272} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.946277] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1070.948129] env[61978]: INFO nova.compute.manager [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Took 6.44 seconds to spawn the instance on the hypervisor. [ 1070.948129] env[61978]: DEBUG nova.compute.manager [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1070.948129] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147d564d-983e-4097-8b7d-0c6c9b42d93a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.958155] env[61978]: DEBUG oslo_concurrency.lockutils [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.275047] env[61978]: DEBUG nova.compute.utils [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1071.278799] env[61978]: DEBUG nova.compute.manager [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1071.278971] env[61978]: DEBUG nova.network.neutron [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1071.332436] env[61978]: DEBUG nova.policy [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '394d03fc54234c369ad2e1255eee9c82', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c31ffdd4e70d40ecbbb56777f9422a52', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1071.369049] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395292, 'name': ReconfigVM_Task, 'duration_secs': 0.284419} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.369167] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Reconfigured VM instance instance-00000040 to attach disk [datastore2] f4034944-3a9d-4e14-a545-0bf574465e0b/f4034944-3a9d-4e14-a545-0bf574465e0b.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1071.369853] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a9d3164e-176f-4041-b412-5e3ba010b6af {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.377038] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1071.377038] env[61978]: value = "task-1395293" [ 1071.377038] env[61978]: _type = "Task" [ 1071.377038] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.386546] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395293, 'name': Rename_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.397970] env[61978]: DEBUG oslo_concurrency.lockutils [None req-503cd8a1-4b4f-4d43-8ef8-34ad0f061130 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "a0ea73d1-a613-4403-8527-a8b81a619adf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.421s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.475244] env[61978]: INFO nova.compute.manager [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Took 23.41 seconds to build instance. [ 1071.595753] env[61978]: DEBUG oslo_concurrency.lockutils [None req-475c2191-501d-4b85-80c1-0e13d1c18f90 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "a0ea73d1-a613-4403-8527-a8b81a619adf" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.597019] env[61978]: DEBUG oslo_concurrency.lockutils [None req-475c2191-501d-4b85-80c1-0e13d1c18f90 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "a0ea73d1-a613-4403-8527-a8b81a619adf" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.597019] env[61978]: DEBUG nova.compute.manager [None req-475c2191-501d-4b85-80c1-0e13d1c18f90 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1071.597465] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-185965f9-fae3-4d1e-9b15-a9a9175c7dd2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.608751] env[61978]: DEBUG nova.compute.manager [None req-475c2191-501d-4b85-80c1-0e13d1c18f90 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61978) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1071.608751] env[61978]: DEBUG nova.objects.instance [None req-475c2191-501d-4b85-80c1-0e13d1c18f90 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lazy-loading 'flavor' on Instance uuid a0ea73d1-a613-4403-8527-a8b81a619adf {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1071.610846] env[61978]: DEBUG nova.compute.manager [req-6027050c-c3d3-4ef6-b709-d5f3e62780ec req-858d4aac-7ad8-43a3-b4fb-91197469ae82 service nova] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Received event network-vif-deleted-141945f2-7f26-43d2-8d56-5880f14da310 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1071.621341] env[61978]: DEBUG 
nova.network.neutron [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Successfully created port: c09a5182-eea7-4874-aa47-480a81863dd3 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1071.781335] env[61978]: DEBUG nova.compute.manager [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1071.820801] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d06b2c-e371-4f7d-9342-f0fbc217a98d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.831815] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a13da69-f895-4287-949f-26204ae96c39 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.866607] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a590d1a7-6ef5-4df9-b232-7b3354796978 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.874844] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e67a43e1-716e-440a-a090-46a5498fac3e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.890741] env[61978]: DEBUG nova.compute.provider_tree [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1071.894827] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395293, 'name': Rename_Task, 'duration_secs': 0.137434} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.895321] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1071.895560] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0be8c990-c6a0-4e28-b6d4-3cac93f21c2c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.901856] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1071.901856] env[61978]: value = "task-1395294" [ 1071.901856] env[61978]: _type = "Task" [ 1071.901856] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.910628] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395294, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.976869] env[61978]: DEBUG oslo_concurrency.lockutils [None req-035141c2-f3a3-4181-aca5-2b054e7afac2 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "b932d221-aca9-4853-aa9c-2d27981e878c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.925s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.114897] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-475c2191-501d-4b85-80c1-0e13d1c18f90 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1072.115223] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5fe8ef9e-06bb-4af4-8bde-32d16d5473f0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.122892] env[61978]: DEBUG oslo_vmware.api [None req-475c2191-501d-4b85-80c1-0e13d1c18f90 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1072.122892] env[61978]: value = "task-1395295" [ 1072.122892] env[61978]: _type = "Task" [ 1072.122892] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.131867] env[61978]: DEBUG oslo_vmware.api [None req-475c2191-501d-4b85-80c1-0e13d1c18f90 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395295, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.319200] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquiring lock "c861eaa2-1c57-476f-92b3-886c8e44f6b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.319522] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lock "c861eaa2-1c57-476f-92b3-886c8e44f6b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.397054] env[61978]: DEBUG nova.scheduler.client.report [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1072.411045] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395294, 'name': PowerOnVM_Task} progress is 71%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.632185] env[61978]: DEBUG oslo_vmware.api [None req-475c2191-501d-4b85-80c1-0e13d1c18f90 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395295, 'name': PowerOffVM_Task, 'duration_secs': 0.454933} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.632478] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-475c2191-501d-4b85-80c1-0e13d1c18f90 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1072.632662] env[61978]: DEBUG nova.compute.manager [None req-475c2191-501d-4b85-80c1-0e13d1c18f90 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1072.633532] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06dbde1e-3232-417e-b08f-67cb3b9b133e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.792390] env[61978]: DEBUG nova.compute.manager [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1072.818456] env[61978]: DEBUG nova.virt.hardware [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1072.818714] env[61978]: DEBUG nova.virt.hardware [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1072.818878] env[61978]: DEBUG nova.virt.hardware [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1072.819076] env[61978]: DEBUG nova.virt.hardware [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1072.819232] env[61978]: DEBUG nova.virt.hardware [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 
tempest-ImagesTestJSON-1872689461-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1072.819385] env[61978]: DEBUG nova.virt.hardware [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1072.819602] env[61978]: DEBUG nova.virt.hardware [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1072.819765] env[61978]: DEBUG nova.virt.hardware [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1072.819934] env[61978]: DEBUG nova.virt.hardware [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1072.820118] env[61978]: DEBUG nova.virt.hardware [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1072.820302] env[61978]: DEBUG nova.virt.hardware [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1072.821196] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dbc2ca1-f851-473c-9975-87b6900ac42d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.823908] env[61978]: DEBUG nova.compute.manager [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1072.832115] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193d8aac-95bf-4469-9e43-70eb7ec0787b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.902497] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.131s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.904843] env[61978]: DEBUG oslo_concurrency.lockutils [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 21.152s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.915395] env[61978]: DEBUG oslo_vmware.api [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395294, 'name': PowerOnVM_Task, 'duration_secs': 0.886486} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.915659] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1072.915868] env[61978]: INFO nova.compute.manager [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Took 5.84 seconds to spawn the instance on the hypervisor. 
[ 1072.916068] env[61978]: DEBUG nova.compute.manager [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1072.916853] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-065094b3-e53e-473d-a63a-c3264737e661 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.926660] env[61978]: INFO nova.scheduler.client.report [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Deleted allocations for instance 0d48ae5d-7cc8-42b3-a993-44636e9cb171 [ 1073.139173] env[61978]: DEBUG nova.network.neutron [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Successfully updated port: c09a5182-eea7-4874-aa47-480a81863dd3 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1073.148078] env[61978]: DEBUG oslo_concurrency.lockutils [None req-475c2191-501d-4b85-80c1-0e13d1c18f90 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "a0ea73d1-a613-4403-8527-a8b81a619adf" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.552s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.909117] env[61978]: INFO nova.compute.claims [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1073.917151] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "refresh_cache-a1087abd-28d1-40ac-96ab-dc38392d027c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1073.917282] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquired lock "refresh_cache-a1087abd-28d1-40ac-96ab-dc38392d027c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.917428] env[61978]: DEBUG nova.network.neutron [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1073.921301] env[61978]: DEBUG nova.compute.manager [req-b212f7e3-73ad-4bd0-b14a-b31a44cefaba req-9984c54b-f39a-48b5-b099-643992108990 service nova] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Received event network-vif-plugged-c09a5182-eea7-4874-aa47-480a81863dd3 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1073.921501] env[61978]: DEBUG oslo_concurrency.lockutils 
[req-b212f7e3-73ad-4bd0-b14a-b31a44cefaba req-9984c54b-f39a-48b5-b099-643992108990 service nova] Acquiring lock "a1087abd-28d1-40ac-96ab-dc38392d027c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.921696] env[61978]: DEBUG oslo_concurrency.lockutils [req-b212f7e3-73ad-4bd0-b14a-b31a44cefaba req-9984c54b-f39a-48b5-b099-643992108990 service nova] Lock "a1087abd-28d1-40ac-96ab-dc38392d027c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.921853] env[61978]: DEBUG oslo_concurrency.lockutils [req-b212f7e3-73ad-4bd0-b14a-b31a44cefaba req-9984c54b-f39a-48b5-b099-643992108990 service nova] Lock "a1087abd-28d1-40ac-96ab-dc38392d027c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.922026] env[61978]: DEBUG nova.compute.manager [req-b212f7e3-73ad-4bd0-b14a-b31a44cefaba req-9984c54b-f39a-48b5-b099-643992108990 service nova] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] No waiting events found dispatching network-vif-plugged-c09a5182-eea7-4874-aa47-480a81863dd3 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1073.922215] env[61978]: WARNING nova.compute.manager [req-b212f7e3-73ad-4bd0-b14a-b31a44cefaba req-9984c54b-f39a-48b5-b099-643992108990 service nova] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Received unexpected event network-vif-plugged-c09a5182-eea7-4874-aa47-480a81863dd3 for instance with vm_state building and task_state spawning. [ 1073.922341] env[61978]: DEBUG nova.compute.manager [req-b212f7e3-73ad-4bd0-b14a-b31a44cefaba req-9984c54b-f39a-48b5-b099-643992108990 service nova] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Received event network-changed-c09a5182-eea7-4874-aa47-480a81863dd3 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1073.922486] env[61978]: DEBUG nova.compute.manager [req-b212f7e3-73ad-4bd0-b14a-b31a44cefaba req-9984c54b-f39a-48b5-b099-643992108990 service nova] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Refreshing instance network info cache due to event network-changed-c09a5182-eea7-4874-aa47-480a81863dd3. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1073.922649] env[61978]: DEBUG oslo_concurrency.lockutils [req-b212f7e3-73ad-4bd0-b14a-b31a44cefaba req-9984c54b-f39a-48b5-b099-643992108990 service nova] Acquiring lock "refresh_cache-a1087abd-28d1-40ac-96ab-dc38392d027c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1073.926041] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f1196e3a-850a-487d-9105-88e8b7279dfa tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "0d48ae5d-7cc8-42b3-a993-44636e9cb171" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.078s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.947823] env[61978]: INFO nova.compute.manager [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Took 25.25 seconds to build instance. [ 1073.950432] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.427094] env[61978]: INFO nova.compute.resource_tracker [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Updating resource usage from migration dfe6158b-18dd-4d7f-8f9a-1d1b35f9479f [ 1074.438621] env[61978]: DEBUG oslo_concurrency.lockutils [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "a0ea73d1-a613-4403-8527-a8b81a619adf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.438874] env[61978]: DEBUG oslo_concurrency.lockutils [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "a0ea73d1-a613-4403-8527-a8b81a619adf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.439101] env[61978]: DEBUG oslo_concurrency.lockutils [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "a0ea73d1-a613-4403-8527-a8b81a619adf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.440733] env[61978]: DEBUG oslo_concurrency.lockutils [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "a0ea73d1-a613-4403-8527-a8b81a619adf-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.440733] env[61978]: DEBUG oslo_concurrency.lockutils [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "a0ea73d1-a613-4403-8527-a8b81a619adf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.441695] env[61978]: INFO nova.compute.manager [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Terminating instance [ 1074.443460] env[61978]: DEBUG nova.compute.manager [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1074.443739] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1074.444504] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f1b58f-74c1-454b-a21b-1b94a9c54e0a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.449831] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9484c0a1-7742-4c54-8eb7-d80130e1e7e4 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "f4034944-3a9d-4e14-a545-0bf574465e0b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.771s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.455188] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1074.455467] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ac4234e-d70a-48a3-8880-3eaf388ddfee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.458238] env[61978]: DEBUG nova.network.neutron [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1074.532042] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1074.532042] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1074.532042] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Deleting the datastore file [datastore2] a0ea73d1-a613-4403-8527-a8b81a619adf {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1074.532042] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d213cf12-151b-463d-976a-e589c220923d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.539406] env[61978]: DEBUG oslo_vmware.api [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1074.539406] env[61978]: value = "task-1395297" [ 1074.539406] env[61978]: _type = "Task" [ 1074.539406] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.547224] env[61978]: DEBUG oslo_vmware.api [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395297, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.584863] env[61978]: INFO nova.compute.manager [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Rebuilding instance [ 1074.620558] env[61978]: DEBUG nova.network.neutron [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Updating instance_info_cache with network_info: [{"id": "c09a5182-eea7-4874-aa47-480a81863dd3", "address": "fa:16:3e:2b:c2:dd", "network": {"id": "c8b3d2ab-6847-4747-9638-70d0aefd63da", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1816910772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c31ffdd4e70d40ecbbb56777f9422a52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc09a5182-ee", "ovs_interfaceid": "c09a5182-eea7-4874-aa47-480a81863dd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.650850] env[61978]: DEBUG nova.compute.manager [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1074.654925] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c9b1e6-18f6-40c0-b274-ae102782e476 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.870499] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1aaee9-c4aa-4819-b345-121f6cb52538 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.879711] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36532eb1-7eda-41a2-b9ec-12251cb38e66 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.908486] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd11c23-cfd8-4f6e-9556-29fb97691e72 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.915714] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90be66ee-e2c4-40e8-85f3-545cb082a4bc {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.930686] env[61978]: DEBUG nova.compute.provider_tree [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1075.049348] env[61978]: DEBUG oslo_vmware.api [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395297, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202767} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.050019] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1075.050019] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1075.050019] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1075.050192] env[61978]: INFO nova.compute.manager [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1075.050408] env[61978]: DEBUG oslo.service.loopingcall [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1075.050608] env[61978]: DEBUG nova.compute.manager [-] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1075.050701] env[61978]: DEBUG nova.network.neutron [-] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1075.127464] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Releasing lock "refresh_cache-a1087abd-28d1-40ac-96ab-dc38392d027c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1075.127801] env[61978]: DEBUG nova.compute.manager [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Instance network_info: |[{"id": "c09a5182-eea7-4874-aa47-480a81863dd3", "address": "fa:16:3e:2b:c2:dd", "network": {"id": "c8b3d2ab-6847-4747-9638-70d0aefd63da", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1816910772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c31ffdd4e70d40ecbbb56777f9422a52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc09a5182-ee", "ovs_interfaceid": "c09a5182-eea7-4874-aa47-480a81863dd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1075.128144] env[61978]: DEBUG oslo_concurrency.lockutils [req-b212f7e3-73ad-4bd0-b14a-b31a44cefaba req-9984c54b-f39a-48b5-b099-643992108990 service nova] Acquired lock "refresh_cache-a1087abd-28d1-40ac-96ab-dc38392d027c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.128311] env[61978]: DEBUG nova.network.neutron [req-b212f7e3-73ad-4bd0-b14a-b31a44cefaba req-9984c54b-f39a-48b5-b099-643992108990 service nova] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Refreshing network info cache for port c09a5182-eea7-4874-aa47-480a81863dd3 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1075.129487] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:c2:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '411f389f-4e4f-4450-891e-38944cac6135', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c09a5182-eea7-4874-aa47-480a81863dd3', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1075.137209] env[61978]: DEBUG oslo.service.loopingcall [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1075.137668] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1075.138448] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ef66e4bc-564d-4a8c-9831-75f71c16fc56 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.158419] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1075.158419] env[61978]: value = "task-1395298" [ 1075.158419] env[61978]: _type = "Task" [ 1075.158419] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.164322] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1075.168342] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-77f8b9a1-2ccc-457c-a54f-ae65e97a8fa7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.170029] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395298, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.174585] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1075.174585] env[61978]: value = "task-1395299" [ 1075.174585] env[61978]: _type = "Task" [ 1075.174585] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.185628] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395299, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.435075] env[61978]: DEBUG nova.scheduler.client.report [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1075.669867] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395298, 'name': CreateVM_Task, 'duration_secs': 0.306504} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.672014] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1075.672732] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1075.672910] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.673257] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1075.673935] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-392fab9d-abbc-4752-abd9-7c55a7c6bdd9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.681431] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1075.681431] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52df1cd0-3ea1-ba21-6583-b29f3e00852d" [ 1075.681431] env[61978]: _type = "Task" [ 1075.681431] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.684583] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395299, 'name': PowerOffVM_Task, 'duration_secs': 0.123019} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.687883] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1075.687883] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1075.688674] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff2af709-42e6-44f7-a0a8-b53b7ca2285d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.696461] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52df1cd0-3ea1-ba21-6583-b29f3e00852d, 'name': SearchDatastore_Task, 'duration_secs': 0.01027} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.698507] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1075.698740] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1075.699166] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1075.699166] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.699312] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1075.699564] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1075.699789] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c0fb2d3-9ca9-44f2-bdac-ed0c3def2aa6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.701424] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aba7f9d6-ede7-4a3d-8694-dfb93a1dd9fb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.709767] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1075.709971] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1075.710677] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-570a58a7-0381-4831-8061-d1716589cdf6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.718667] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1075.718667] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d5b968-12d1-72ea-0bbf-053aa9baa833" [ 1075.718667] env[61978]: _type = "Task" [ 1075.718667] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.726288] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d5b968-12d1-72ea-0bbf-053aa9baa833, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.727401] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1075.727997] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1075.727997] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Deleting the datastore file [datastore2] f4034944-3a9d-4e14-a545-0bf574465e0b {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1075.727997] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-308601f1-a3eb-4e57-8f6c-5e4d7ce91af7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.733091] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1075.733091] env[61978]: value = "task-1395301" [ 1075.733091] env[61978]: _type = "Task" [ 1075.733091] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.740531] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395301, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.743890] env[61978]: DEBUG nova.compute.manager [req-21ece065-a335-47aa-99c0-4d0db1a5d59b req-ca76fe8f-be01-4e09-a64b-f3eddd69936d service nova] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Received event network-vif-deleted-6d7b8d71-cb74-42e7-a945-feaf0769d81e {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1075.744126] env[61978]: INFO nova.compute.manager [req-21ece065-a335-47aa-99c0-4d0db1a5d59b req-ca76fe8f-be01-4e09-a64b-f3eddd69936d service nova] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Neutron deleted interface 6d7b8d71-cb74-42e7-a945-feaf0769d81e; detaching it from the instance and deleting it from the info cache [ 1075.744419] env[61978]: DEBUG nova.network.neutron [req-21ece065-a335-47aa-99c0-4d0db1a5d59b req-ca76fe8f-be01-4e09-a64b-f3eddd69936d service nova] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.801328] env[61978]: DEBUG nova.network.neutron [-] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.850037] env[61978]: DEBUG nova.network.neutron [req-b212f7e3-73ad-4bd0-b14a-b31a44cefaba req-9984c54b-f39a-48b5-b099-643992108990 service nova] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Updated VIF entry in instance network info cache for port c09a5182-eea7-4874-aa47-480a81863dd3. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1075.850424] env[61978]: DEBUG nova.network.neutron [req-b212f7e3-73ad-4bd0-b14a-b31a44cefaba req-9984c54b-f39a-48b5-b099-643992108990 service nova] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Updating instance_info_cache with network_info: [{"id": "c09a5182-eea7-4874-aa47-480a81863dd3", "address": "fa:16:3e:2b:c2:dd", "network": {"id": "c8b3d2ab-6847-4747-9638-70d0aefd63da", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1816910772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c31ffdd4e70d40ecbbb56777f9422a52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc09a5182-ee", "ovs_interfaceid": "c09a5182-eea7-4874-aa47-480a81863dd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.940245] env[61978]: DEBUG oslo_concurrency.lockutils [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.035s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.940532] env[61978]: INFO nova.compute.manager [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Migrating [ 1075.947304] env[61978]: DEBUG oslo_concurrency.lockutils [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.946s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.947530] env[61978]: DEBUG nova.objects.instance [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lazy-loading 'resources' on Instance uuid 3ee1023c-7837-4db0-88d4-f88c9a43fba3 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1076.231771] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d5b968-12d1-72ea-0bbf-053aa9baa833, 'name': SearchDatastore_Task, 'duration_secs': 0.010792} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.232389] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acb6bfbf-2390-4fe7-a0ce-46502c3336fb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.246099] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395301, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089522} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.246099] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1076.246099] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]526c65ab-1953-b3f4-24b1-b6d5c71ca440" [ 1076.246099] env[61978]: _type = "Task" [ 1076.246099] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.246099] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1076.246099] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1076.246099] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1076.251951] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8152e49d-140b-4bdf-99b0-9a6ed3e56cf5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.260411] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]526c65ab-1953-b3f4-24b1-b6d5c71ca440, 'name': SearchDatastore_Task, 'duration_secs': 0.009417} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.263387] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1076.263387] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] a1087abd-28d1-40ac-96ab-dc38392d027c/a1087abd-28d1-40ac-96ab-dc38392d027c.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1076.263618] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a215b1b-0021-41fc-aacb-3f289d740312 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.268016] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4981ca75-d057-4850-9d61-caf83c18126d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.285783] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 
1076.285783] env[61978]: value = "task-1395302" [ 1076.285783] env[61978]: _type = "Task" [ 1076.285783] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.295857] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395302, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.308973] env[61978]: INFO nova.compute.manager [-] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Took 1.26 seconds to deallocate network for instance. [ 1076.309398] env[61978]: DEBUG nova.compute.manager [req-21ece065-a335-47aa-99c0-4d0db1a5d59b req-ca76fe8f-be01-4e09-a64b-f3eddd69936d service nova] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Detach interface failed, port_id=6d7b8d71-cb74-42e7-a945-feaf0769d81e, reason: Instance a0ea73d1-a613-4403-8527-a8b81a619adf could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1076.353116] env[61978]: DEBUG oslo_concurrency.lockutils [req-b212f7e3-73ad-4bd0-b14a-b31a44cefaba req-9984c54b-f39a-48b5-b099-643992108990 service nova] Releasing lock "refresh_cache-a1087abd-28d1-40ac-96ab-dc38392d027c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1076.373610] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-444c93e8-0a6a-4260-a3e5-2a6d029b81b0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.382081] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed5668f8-8969-4960-a98b-80e207efc51b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.453087] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9466502e-729d-4db9-96f1-c08100772903 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.453087] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83fb5203-7eac-410c-99ad-6a1bd4743e66 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.453087] env[61978]: DEBUG nova.compute.provider_tree [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1076.456392] env[61978]: DEBUG oslo_concurrency.lockutils [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "refresh_cache-c17c986e-c008-4414-8dd1-4ea836458048" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1076.456641] env[61978]: DEBUG oslo_concurrency.lockutils [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 
tempest-ServerActionsTestOtherB-375006338-project-member] Acquired lock "refresh_cache-c17c986e-c008-4414-8dd1-4ea836458048" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.456873] env[61978]: DEBUG nova.network.neutron [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1076.796801] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395302, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486628} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.797095] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] a1087abd-28d1-40ac-96ab-dc38392d027c/a1087abd-28d1-40ac-96ab-dc38392d027c.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1076.797398] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1076.797620] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bf7f5289-e927-4495-b66d-a441a638ebcc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.803888] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1076.803888] env[61978]: value = "task-1395303" [ 1076.803888] env[61978]: _type = "Task" [ 1076.803888] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.811549] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395303, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.817736] env[61978]: DEBUG oslo_concurrency.lockutils [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.937579] env[61978]: DEBUG nova.scheduler.client.report [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1077.187140] env[61978]: DEBUG nova.network.neutron [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Updating instance_info_cache with network_info: [{"id": "394a8251-684b-4ddc-ae5c-7ef7ec06b503", "address": "fa:16:3e:5c:ce:8c", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap394a8251-68", "ovs_interfaceid": "394a8251-684b-4ddc-ae5c-7ef7ec06b503", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.286878] env[61978]: DEBUG nova.virt.hardware [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1077.286878] env[61978]: DEBUG nova.virt.hardware [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1077.286878] env[61978]: DEBUG nova.virt.hardware [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1077.287074] env[61978]: DEBUG nova.virt.hardware [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1077.287396] env[61978]: DEBUG nova.virt.hardware [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1077.287596] env[61978]: DEBUG nova.virt.hardware [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1077.287820] env[61978]: DEBUG nova.virt.hardware [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1077.287997] env[61978]: DEBUG nova.virt.hardware [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1077.288202] env[61978]: DEBUG nova.virt.hardware [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1077.288389] env[61978]: DEBUG nova.virt.hardware [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1077.288576] env[61978]: DEBUG nova.virt.hardware [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be 
tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1077.289452] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b94076c-86b9-47ac-9ca9-36d9e1d462a0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.297493] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47943d49-94c1-4263-9e92-cfad13dbb1b7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.310664] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Instance VIF info [] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1077.316315] env[61978]: DEBUG oslo.service.loopingcall [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1077.319422] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1077.319610] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a439a6c-a10e-4f4e-9987-458717ec6cfd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.337049] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395303, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077438} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.338121] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1077.338390] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1077.338390] env[61978]: value = "task-1395304" [ 1077.338390] env[61978]: _type = "Task" [ 1077.338390] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.339223] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce0b973-b44c-4baf-b9de-731358c683c1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.352611] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395304, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.369670] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] a1087abd-28d1-40ac-96ab-dc38392d027c/a1087abd-28d1-40ac-96ab-dc38392d027c.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1077.369940] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36338f97-000e-460b-ba8d-8fe6daed38cf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.387677] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1077.387677] env[61978]: value = "task-1395305" [ 1077.387677] env[61978]: _type = "Task" [ 1077.387677] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.395429] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395305, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.442669] env[61978]: DEBUG oslo_concurrency.lockutils [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.495s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.446488] env[61978]: DEBUG oslo_concurrency.lockutils [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.219s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1077.449029] env[61978]: INFO nova.compute.claims [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1077.468635] env[61978]: INFO nova.scheduler.client.report [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Deleted allocations for instance 3ee1023c-7837-4db0-88d4-f88c9a43fba3 [ 1077.688589] env[61978]: DEBUG oslo_concurrency.lockutils [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lock "refresh_cache-c17c986e-c008-4414-8dd1-4ea836458048" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1077.852259] env[61978]: DEBUG 
oslo_vmware.api [-] Task: {'id': task-1395304, 'name': CreateVM_Task, 'duration_secs': 0.362257} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.852502] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1077.853050] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1077.853278] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.853659] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1077.854377] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6a9463a-4bfe-448d-ac46-cdf3683c81dc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.858960] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1077.858960] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5273a85f-7775-d6d2-3dd1-94b5b9e425fb" [ 1077.858960] env[61978]: _type = "Task" [ 1077.858960] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.865982] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5273a85f-7775-d6d2-3dd1-94b5b9e425fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.896015] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395305, 'name': ReconfigVM_Task, 'duration_secs': 0.341092} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.896299] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Reconfigured VM instance instance-00000041 to attach disk [datastore2] a1087abd-28d1-40ac-96ab-dc38392d027c/a1087abd-28d1-40ac-96ab-dc38392d027c.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1077.896966] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d1c62a52-5323-4487-a80b-abe2bfb872b9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.902934] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1077.902934] env[61978]: value = "task-1395306" [ 1077.902934] env[61978]: _type = "Task" [ 1077.902934] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.910022] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395306, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.976304] env[61978]: DEBUG oslo_concurrency.lockutils [None req-970792e0-1a79-4b67-b4da-b5767ead1ba6 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "3ee1023c-7837-4db0-88d4-f88c9a43fba3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.949s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.369491] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5273a85f-7775-d6d2-3dd1-94b5b9e425fb, 'name': SearchDatastore_Task, 'duration_secs': 0.070138} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.369825] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1078.369955] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1078.370193] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1078.371013] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.371013] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1078.371013] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68d8bb03-41c5-4ce1-89cd-056487db434b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.380514] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1078.380716] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1078.381464] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d632b92-12a3-460b-9f8f-e0a0c443ace8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.388030] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1078.388030] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52297d98-1731-bdfd-defa-274c1656250f" [ 1078.388030] env[61978]: _type = "Task" [ 1078.388030] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.394376] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52297d98-1731-bdfd-defa-274c1656250f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.413063] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395306, 'name': Rename_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.891827] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e089087b-1b15-469e-9d7c-cc93df6d9e0b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.902885] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5beff1f2-827a-4ccc-8f3d-32f60af633e6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.905972] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52297d98-1731-bdfd-defa-274c1656250f, 'name': SearchDatastore_Task, 'duration_secs': 0.023582} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.910264] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1adf99d7-9d70-47e0-bf0a-4db5005af4df {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.939488] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bdf0420-f38f-4528-830e-7c65e4dc0870 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.947208] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1078.947208] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]522172d2-85f6-1176-a470-f5b6c69879b4" [ 1078.947208] env[61978]: _type = "Task" [ 1078.947208] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.947526] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395306, 'name': Rename_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.954663] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27754774-e707-456d-8e4e-740fd1c4efcf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.962544] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522172d2-85f6-1176-a470-f5b6c69879b4, 'name': SearchDatastore_Task, 'duration_secs': 0.010128} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.964131] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1078.964131] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] f4034944-3a9d-4e14-a545-0bf574465e0b/f4034944-3a9d-4e14-a545-0bf574465e0b.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1078.964131] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9619b3b2-3cb0-4aa9-bff6-9a2fb9e00fec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.974078] env[61978]: DEBUG nova.compute.provider_tree [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1078.980186] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1078.980186] env[61978]: value = "task-1395308" [ 1078.980186] env[61978]: _type = "Task" [ 1078.980186] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.990887] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395308, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.206447] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147ac002-8511-4bb1-8344-1e16a02d85eb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.226151] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Updating instance 'c17c986e-c008-4414-8dd1-4ea836458048' progress to 0 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1079.423431] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395306, 'name': Rename_Task, 'duration_secs': 1.040525} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.423700] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1079.424179] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d6219f6c-171f-42cf-91d4-f670e9507e05 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.433898] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1079.433898] env[61978]: value = "task-1395309" [ 1079.433898] env[61978]: _type = "Task" [ 1079.433898] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.444273] env[61978]: INFO nova.compute.manager [None req-43ff0d05-f39d-482f-b070-7d525e74f005 tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Get console output [ 1079.444907] env[61978]: WARNING nova.virt.vmwareapi.driver [None req-43ff0d05-f39d-482f-b070-7d525e74f005 tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] The console log is missing. Check your VSPC configuration [ 1079.454588] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395309, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.478164] env[61978]: DEBUG nova.scheduler.client.report [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1079.498898] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395308, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.732546] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1079.732875] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d4de7409-de0f-413d-8b75-08d80c5a2c34 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.739659] env[61978]: DEBUG oslo_vmware.api [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1079.739659] env[61978]: value = "task-1395310" [ 1079.739659] env[61978]: _type = "Task" [ 1079.739659] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.747257] env[61978]: DEBUG oslo_vmware.api [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395310, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.944540] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395309, 'name': PowerOnVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.990576] env[61978]: DEBUG oslo_concurrency.lockutils [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.544s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.991241] env[61978]: DEBUG nova.compute.manager [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1079.997948] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.084s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.998269] env[61978]: DEBUG nova.objects.instance [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lazy-loading 'resources' on Instance uuid 85fc5af8-454d-4042-841a-945b7e84eb6c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1079.999657] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395308, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.549864} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.001332] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] f4034944-3a9d-4e14-a545-0bf574465e0b/f4034944-3a9d-4e14-a545-0bf574465e0b.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1080.001593] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1080.004194] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3468a38d-4202-404b-a87d-987ad19bf5bf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.012569] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1080.012569] env[61978]: value = "task-1395311" [ 1080.012569] env[61978]: _type = "Task" [ 1080.012569] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.021424] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395311, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.248983] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] VM already powered off {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1080.249218] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Updating instance 'c17c986e-c008-4414-8dd1-4ea836458048' progress to 17 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1080.445535] env[61978]: DEBUG oslo_vmware.api [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395309, 'name': PowerOnVM_Task, 'duration_secs': 0.648491} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.445846] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1080.446081] env[61978]: INFO nova.compute.manager [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Took 7.65 seconds to spawn the instance on the hypervisor. [ 1080.446273] env[61978]: DEBUG nova.compute.manager [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1080.447041] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5a3090-48ef-4731-90b6-0ded68b72cd2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.504259] env[61978]: DEBUG nova.compute.utils [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1080.507170] env[61978]: DEBUG nova.compute.manager [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1080.507575] env[61978]: DEBUG nova.network.neutron [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1080.510326] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Acquiring lock "eb7cb200-c162-4e92-8916-6d9abd5cf34d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.510990] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Lock "eb7cb200-c162-4e92-8916-6d9abd5cf34d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.510990] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Acquiring lock "eb7cb200-c162-4e92-8916-6d9abd5cf34d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.511239] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Lock "eb7cb200-c162-4e92-8916-6d9abd5cf34d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.511481] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Lock "eb7cb200-c162-4e92-8916-6d9abd5cf34d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.513723] env[61978]: INFO nova.compute.manager [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Terminating instance [ 1080.516570] env[61978]: DEBUG nova.compute.manager [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1080.516846] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1080.521723] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e10be467-d26a-4b4c-9245-fa1107e8d056 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.530503] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395311, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076172} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.532877] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1080.533302] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1080.534103] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb1d418-c5cd-4849-abc7-88040c150644 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.536760] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ea3de0a-91cf-4d16-a2d8-f923bca5dcee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.567066] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] f4034944-3a9d-4e14-a545-0bf574465e0b/f4034944-3a9d-4e14-a545-0bf574465e0b.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1080.569978] env[61978]: DEBUG nova.policy [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dbbb8f4f732c4ff0a036241104b6b87d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ef38e57bbc04a7ebf167286cdffd07c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 
'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1080.574485] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8645cba9-74fa-452c-a529-9c3110c9356a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.588153] env[61978]: DEBUG oslo_vmware.api [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Waiting for the task: (returnval){ [ 1080.588153] env[61978]: value = "task-1395312" [ 1080.588153] env[61978]: _type = "Task" [ 1080.588153] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.594615] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1080.594615] env[61978]: value = "task-1395313" [ 1080.594615] env[61978]: _type = "Task" [ 1080.594615] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.597760] env[61978]: DEBUG oslo_vmware.api [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': task-1395312, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.607966] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395313, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.758246] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:05Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1080.758557] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1080.758793] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1080.759279] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1080.759279] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1080.759365] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1080.759587] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1080.759757] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1080.759929] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] 
Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1080.760140] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1080.760367] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1080.766635] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e84b3ed5-bdcd-4362-ab14-876fe89d9844 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.791194] env[61978]: DEBUG oslo_vmware.api [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1080.791194] env[61978]: value = "task-1395314" [ 1080.791194] env[61978]: _type = "Task" [ 1080.791194] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.802334] env[61978]: DEBUG oslo_vmware.api [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395314, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.871932] env[61978]: DEBUG nova.network.neutron [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Successfully created port: 7c209a6b-d93f-45a5-b1df-3b7244e3a624 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1080.966427] env[61978]: INFO nova.compute.manager [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Took 29.83 seconds to build instance. [ 1081.008164] env[61978]: DEBUG nova.compute.manager [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1081.106205] env[61978]: DEBUG oslo_vmware.api [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': task-1395312, 'name': PowerOffVM_Task, 'duration_secs': 0.357825} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.107205] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1081.107205] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1081.107205] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-88d04d63-b8ce-4e17-89fd-1cda88864cfe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.111872] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395313, 'name': ReconfigVM_Task, 'duration_secs': 0.327685} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.112138] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Reconfigured VM instance instance-00000040 to attach disk [datastore2] f4034944-3a9d-4e14-a545-0bf574465e0b/f4034944-3a9d-4e14-a545-0bf574465e0b.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1081.113912] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-656e97c0-0391-434a-8f99-6cfca3569b83 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.122412] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1081.122412] env[61978]: value = "task-1395316" [ 1081.122412] env[61978]: _type = "Task" [ 1081.122412] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.130635] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395316, 'name': Rename_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.144692] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cc7cc2f-914a-4d34-81ad-478475f23e12 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.154198] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e8f0911-8d27-4b59-acca-1adc80b39bf0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.192674] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-296ff47a-0424-4d7b-8bf5-90c5c694e1e4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.195247] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1081.195456] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1081.195639] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Deleting the datastore file [datastore2] eb7cb200-c162-4e92-8916-6d9abd5cf34d {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1081.196205] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0b87356-7cdb-442a-b7d6-b56c9de98db3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.203871] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd99182-56c4-47fe-9282-a555277d7bb1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.207541] env[61978]: DEBUG oslo_vmware.api [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Waiting for the task: (returnval){ [ 1081.207541] env[61978]: value = "task-1395317" [ 1081.207541] env[61978]: _type = "Task" [ 1081.207541] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.218183] env[61978]: DEBUG nova.compute.provider_tree [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1081.223802] env[61978]: DEBUG oslo_vmware.api [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': task-1395317, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.302525] env[61978]: DEBUG oslo_vmware.api [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395314, 'name': ReconfigVM_Task, 'duration_secs': 0.189266} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.302842] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Updating instance 'c17c986e-c008-4414-8dd1-4ea836458048' progress to 33 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1081.468181] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7c7940ed-5d44-4164-800e-786dbf63983b tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "a1087abd-28d1-40ac-96ab-dc38392d027c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.349s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.633210] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395316, 'name': Rename_Task, 'duration_secs': 0.142436} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.633354] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1081.633532] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-59432a89-83b1-40bd-a53f-9e3b3c5a276f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.640363] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1081.640363] env[61978]: value = "task-1395318" [ 1081.640363] env[61978]: _type = "Task" [ 1081.640363] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.654409] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395318, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.717020] env[61978]: DEBUG oslo_vmware.api [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Task: {'id': task-1395317, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137482} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.717020] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1081.717020] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1081.717173] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1081.717244] env[61978]: INFO nova.compute.manager [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1081.717492] env[61978]: DEBUG oslo.service.loopingcall [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1081.718412] env[61978]: DEBUG nova.compute.manager [-] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1081.718412] env[61978]: DEBUG nova.network.neutron [-] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1081.722572] env[61978]: DEBUG nova.scheduler.client.report [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1081.809181] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1081.809487] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1081.809654] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1081.809872] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1081.810415] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1081.810613] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d 
tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1081.810853] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1081.811057] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1081.811259] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1081.811455] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1081.811640] env[61978]: DEBUG nova.virt.hardware [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1081.818827] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Reconfiguring VM instance instance-00000019 to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1081.820582] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bba8a139-3bb8-4462-a645-39bfd5d339e2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.844385] env[61978]: DEBUG oslo_vmware.api [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1081.844385] env[61978]: value = "task-1395319" [ 1081.844385] env[61978]: _type = "Task" [ 1081.844385] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.854398] env[61978]: DEBUG oslo_vmware.api [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395319, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.937832] env[61978]: DEBUG oslo_concurrency.lockutils [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquiring lock "96bef3f3-a45c-43ba-a86a-66c1d5686ea6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.938217] env[61978]: DEBUG oslo_concurrency.lockutils [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "96bef3f3-a45c-43ba-a86a-66c1d5686ea6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.938453] env[61978]: DEBUG oslo_concurrency.lockutils [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquiring lock "96bef3f3-a45c-43ba-a86a-66c1d5686ea6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.938702] env[61978]: DEBUG oslo_concurrency.lockutils [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "96bef3f3-a45c-43ba-a86a-66c1d5686ea6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.938895] env[61978]: DEBUG oslo_concurrency.lockutils [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "96bef3f3-a45c-43ba-a86a-66c1d5686ea6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.941246] env[61978]: INFO nova.compute.manager [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Terminating instance [ 1081.943124] env[61978]: DEBUG nova.compute.manager [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1081.943375] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1081.944460] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559dbf5b-dd77-475a-bf25-dbde6113ce1b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.952733] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1081.953038] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bd693885-666e-4044-aee3-dd4c77ee520e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.961156] env[61978]: DEBUG oslo_vmware.api [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 1081.961156] env[61978]: value = "task-1395320" [ 1081.961156] env[61978]: _type = "Task" [ 1081.961156] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.969940] env[61978]: DEBUG oslo_vmware.api [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395320, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.020158] env[61978]: DEBUG nova.compute.manager [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1082.043181] env[61978]: DEBUG nova.virt.hardware [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1082.043455] env[61978]: DEBUG nova.virt.hardware [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1082.043615] env[61978]: DEBUG nova.virt.hardware [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1082.043802] env[61978]: DEBUG nova.virt.hardware [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1082.043953] env[61978]: DEBUG nova.virt.hardware [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1082.044137] env[61978]: DEBUG nova.virt.hardware [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1082.044353] env[61978]: DEBUG nova.virt.hardware [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1082.044555] env[61978]: DEBUG nova.virt.hardware [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1082.044753] env[61978]: DEBUG nova.virt.hardware [None 
req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1082.044928] env[61978]: DEBUG nova.virt.hardware [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1082.045127] env[61978]: DEBUG nova.virt.hardware [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1082.046073] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43435f6-c048-42d3-9d50-d3d12c8a6781 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.056129] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea9233d-dd94-41c4-b0d5-51bd78ef2f1f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.153027] env[61978]: DEBUG oslo_vmware.api [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395318, 'name': PowerOnVM_Task, 'duration_secs': 0.438993} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.153467] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1082.153750] env[61978]: DEBUG nova.compute.manager [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1082.154665] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-688938ad-f35f-4aa9-a9f1-76a969c75138 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.227334] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.229s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.229719] env[61978]: DEBUG oslo_concurrency.lockutils [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.020s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.230059] env[61978]: DEBUG nova.objects.instance [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lazy-loading 'resources' on Instance uuid 7e71c8de-1f94-4161-8ad8-a67792c5ce24 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1082.261407] env[61978]: INFO nova.scheduler.client.report [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Deleted allocations for instance 85fc5af8-454d-4042-841a-945b7e84eb6c [ 1082.358442] env[61978]: DEBUG oslo_vmware.api [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395319, 'name': ReconfigVM_Task, 'duration_secs': 0.187413} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.358442] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Reconfigured VM instance instance-00000019 to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1082.358442] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc8d3e9b-b292-4441-8bd7-6874d8b77bb1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.382198] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] c17c986e-c008-4414-8dd1-4ea836458048/c17c986e-c008-4414-8dd1-4ea836458048.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1082.384019] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1abb3dba-c5a1-4440-b1ec-0c13ac582d6f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.401921] env[61978]: DEBUG oslo_vmware.api [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1082.401921] env[61978]: value = "task-1395321" [ 1082.401921] env[61978]: _type = "Task" [ 1082.401921] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.410399] env[61978]: DEBUG oslo_vmware.api [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395321, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.447151] env[61978]: DEBUG nova.compute.manager [req-2dc5b841-1410-4ebc-b984-9e367935b1e5 req-ce4ccf0c-ceca-473c-8310-bfbbaa49c182 service nova] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Received event network-vif-deleted-82595737-f96a-45c3-9bcc-2642e53bdaec {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1082.447151] env[61978]: INFO nova.compute.manager [req-2dc5b841-1410-4ebc-b984-9e367935b1e5 req-ce4ccf0c-ceca-473c-8310-bfbbaa49c182 service nova] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Neutron deleted interface 82595737-f96a-45c3-9bcc-2642e53bdaec; detaching it from the instance and deleting it from the info cache [ 1082.447151] env[61978]: DEBUG nova.network.neutron [req-2dc5b841-1410-4ebc-b984-9e367935b1e5 req-ce4ccf0c-ceca-473c-8310-bfbbaa49c182 service nova] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.472136] env[61978]: DEBUG oslo_vmware.api [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395320, 'name': PowerOffVM_Task, 'duration_secs': 0.237} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.479098] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1082.479098] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1082.479098] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7288b7d-5041-4d0a-9a2c-339957c5f3f5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.544024] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1082.544024] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1082.544024] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Deleting the datastore file [datastore1] 
96bef3f3-a45c-43ba-a86a-66c1d5686ea6 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1082.544024] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-682b70c1-9a14-4156-8f80-453889a452ce {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.549028] env[61978]: DEBUG oslo_vmware.api [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for the task: (returnval){ [ 1082.549028] env[61978]: value = "task-1395323" [ 1082.549028] env[61978]: _type = "Task" [ 1082.549028] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.557907] env[61978]: DEBUG oslo_vmware.api [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395323, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.661989] env[61978]: DEBUG nova.network.neutron [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Successfully updated port: 7c209a6b-d93f-45a5-b1df-3b7244e3a624 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1082.675030] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.767797] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8d4be7c4-13c8-40cd-9ec7-ac4424fbd440 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "85fc5af8-454d-4042-841a-945b7e84eb6c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.861s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.847152] env[61978]: DEBUG nova.compute.manager [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1082.848185] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11127314-7d82-4854-b620-42ecb3f5000a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.912673] env[61978]: DEBUG oslo_vmware.api [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395321, 'name': ReconfigVM_Task, 'duration_secs': 0.373015} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.912980] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Reconfigured VM instance instance-00000019 to attach disk [datastore2] c17c986e-c008-4414-8dd1-4ea836458048/c17c986e-c008-4414-8dd1-4ea836458048.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1082.913387] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Updating instance 'c17c986e-c008-4414-8dd1-4ea836458048' progress to 50 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1082.920135] env[61978]: DEBUG nova.network.neutron [-] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.950979] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5a73be7-20a7-4326-a801-a6b1dd486a73 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.960541] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e79f4c-c5ac-4229-9400-8be8878c660a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.996892] env[61978]: DEBUG nova.compute.manager [req-2dc5b841-1410-4ebc-b984-9e367935b1e5 req-ce4ccf0c-ceca-473c-8310-bfbbaa49c182 service nova] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Detach interface failed, port_id=82595737-f96a-45c3-9bcc-2642e53bdaec, reason: Instance eb7cb200-c162-4e92-8916-6d9abd5cf34d could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1083.061688] env[61978]: DEBUG oslo_vmware.api [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Task: {'id': task-1395323, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138347} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.064568] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1083.064831] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1083.065031] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1083.065253] env[61978]: INFO nova.compute.manager [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1083.065528] env[61978]: DEBUG oslo.service.loopingcall [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1083.066096] env[61978]: DEBUG nova.compute.manager [-] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1083.066202] env[61978]: DEBUG nova.network.neutron [-] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1083.164813] env[61978]: DEBUG oslo_concurrency.lockutils [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Acquiring lock "refresh_cache-fdd0c16d-b0f8-4f81-9069-34d11f273acb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1083.164964] env[61978]: DEBUG oslo_concurrency.lockutils [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Acquired lock "refresh_cache-fdd0c16d-b0f8-4f81-9069-34d11f273acb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.165127] env[61978]: DEBUG nova.network.neutron [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1083.220612] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e38a79d5-bdee-4046-890a-174e323993aa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.230944] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf85cf37-5117-490f-b666-da8643caea53 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.267313] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a77ca84-19ac-4801-9add-ac3c7034ae75 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.276496] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d23a31-c31e-480c-b652-8186e553d524 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.291380] env[61978]: DEBUG nova.compute.provider_tree [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1083.367573] env[61978]: INFO nova.compute.manager [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] instance snapshotting [ 1083.372019] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-56083864-c880-4608-bf44-f93715279de5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.390820] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18dd1ecf-98e6-4ea8-84c0-3940a84a0036 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.425822] env[61978]: INFO nova.compute.manager [-] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Took 1.71 seconds to deallocate network for instance. [ 1083.425822] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0ea191c-7ec1-4943-b6e8-46192d508ef0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.447796] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88bfea96-523c-4c29-97c5-4c61d1ed8398 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.471392] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Updating instance 'c17c986e-c008-4414-8dd1-4ea836458048' progress to 67 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1083.644580] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "92eb5edb-803b-48d4-8c4f-338d7c3b3d13" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.644580] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "92eb5edb-803b-48d4-8c4f-338d7c3b3d13" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.644580] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "92eb5edb-803b-48d4-8c4f-338d7c3b3d13-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.644580] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "92eb5edb-803b-48d4-8c4f-338d7c3b3d13-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.644988] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 
tempest-ServersAdminTestJSON-1999249038-project-member] Lock "92eb5edb-803b-48d4-8c4f-338d7c3b3d13-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.647201] env[61978]: INFO nova.compute.manager [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Terminating instance [ 1083.648998] env[61978]: DEBUG nova.compute.manager [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1083.649115] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1083.649970] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cab5cab-b70b-4f3b-8ac3-27da3554d2b9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.658523] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1083.658864] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f98b6f58-53f7-4c7f-8d39-db0ed69ccf13 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.668584] env[61978]: DEBUG oslo_vmware.api [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 1083.668584] env[61978]: value = "task-1395324" [ 1083.668584] env[61978]: _type = "Task" [ 1083.668584] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.678893] env[61978]: DEBUG oslo_vmware.api [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395324, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.709393] env[61978]: DEBUG nova.network.neutron [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1083.725047] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquiring lock "f4034944-3a9d-4e14-a545-0bf574465e0b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.725047] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "f4034944-3a9d-4e14-a545-0bf574465e0b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.725047] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquiring lock "f4034944-3a9d-4e14-a545-0bf574465e0b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.725201] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "f4034944-3a9d-4e14-a545-0bf574465e0b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.725251] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "f4034944-3a9d-4e14-a545-0bf574465e0b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.729500] env[61978]: INFO nova.compute.manager [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Terminating instance [ 1083.731903] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquiring lock "refresh_cache-f4034944-3a9d-4e14-a545-0bf574465e0b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1083.731903] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquired lock "refresh_cache-f4034944-3a9d-4e14-a545-0bf574465e0b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.731903] env[61978]: DEBUG nova.network.neutron [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: 
f4034944-3a9d-4e14-a545-0bf574465e0b] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1083.796233] env[61978]: DEBUG nova.scheduler.client.report [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1083.871545] env[61978]: DEBUG nova.network.neutron [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Updating instance_info_cache with network_info: [{"id": "7c209a6b-d93f-45a5-b1df-3b7244e3a624", "address": "fa:16:3e:77:e4:92", "network": {"id": "fd9afb25-c8d7-405a-9c8f-e0acc08ca108", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-738004830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ef38e57bbc04a7ebf167286cdffd07c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c209a6b-d9", "ovs_interfaceid": "7c209a6b-d93f-45a5-b1df-3b7244e3a624", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.902165] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Creating Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1083.902509] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-eeb7d8ea-c63c-427a-9b25-6000f507df0f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.911367] env[61978]: DEBUG oslo_vmware.api [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1083.911367] env[61978]: value = "task-1395325" [ 1083.911367] env[61978]: _type = "Task" [ 1083.911367] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.920120] env[61978]: DEBUG oslo_vmware.api [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395325, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.948791] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1084.015215] env[61978]: DEBUG nova.network.neutron [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Port 394a8251-684b-4ddc-ae5c-7ef7ec06b503 binding to destination host cpu-1 is already ACTIVE {{(pid=61978) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1084.081036] env[61978]: DEBUG nova.network.neutron [-] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.183045] env[61978]: DEBUG oslo_vmware.api [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395324, 'name': PowerOffVM_Task, 'duration_secs': 0.336238} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.183342] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1084.183534] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1084.183797] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21dc891a-a020-4e76-a655-942878a18f7a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.248513] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1084.248685] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1084.248883] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Deleting the datastore file [datastore1] 92eb5edb-803b-48d4-8c4f-338d7c3b3d13 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1084.249190] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-78ea93cd-ee10-46d3-a1f9-96b45511e225 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.253442] env[61978]: DEBUG nova.network.neutron [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1084.256420] env[61978]: DEBUG oslo_vmware.api [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for the task: (returnval){ [ 1084.256420] env[61978]: value = "task-1395327" [ 1084.256420] env[61978]: _type = "Task" [ 1084.256420] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.264561] env[61978]: DEBUG oslo_vmware.api [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395327, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.301961] env[61978]: DEBUG oslo_concurrency.lockutils [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.072s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.304774] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 25.738s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1084.304774] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.304774] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1084.305236] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.293s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1084.305236] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.306894] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.860s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1084.307222] env[61978]: DEBUG nova.objects.instance [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lazy-loading 'resources' on Instance uuid cb004a19-0048-4766-af7c-0fbde867f422 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1084.308999] env[61978]: DEBUG nova.network.neutron [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1084.310974] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f54a6b9-da75-4e2c-87ae-5fff192efd90 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.319874] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62394b17-bf4f-4b9c-9635-86ee5cacff4e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.335113] env[61978]: INFO nova.scheduler.client.report [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Deleted allocations for instance f930ab49-c215-4b2e-92b1-21c0d52a70eb [ 1084.336559] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc738af0-f16d-4343-bbb1-77cd45f4e415 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.339760] env[61978]: INFO nova.scheduler.client.report [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Deleted allocations for instance 7e71c8de-1f94-4161-8ad8-a67792c5ce24 [ 1084.349586] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd75d97-f965-4e68-9871-f8a843aa63df {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.378710] env[61978]: DEBUG oslo_concurrency.lockutils [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Releasing lock "refresh_cache-fdd0c16d-b0f8-4f81-9069-34d11f273acb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1084.379997] env[61978]: DEBUG nova.compute.manager [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Instance network_info: |[{"id": "7c209a6b-d93f-45a5-b1df-3b7244e3a624", "address": "fa:16:3e:77:e4:92", "network": {"id": "fd9afb25-c8d7-405a-9c8f-e0acc08ca108", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-738004830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ef38e57bbc04a7ebf167286cdffd07c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c209a6b-d9", "ovs_interfaceid": "7c209a6b-d93f-45a5-b1df-3b7244e3a624", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1084.379997] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178684MB free_disk=184GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1084.379997] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1084.379997] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:e4:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a4954661-ff70-43dd-bc60-8cbca6b9cbfa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c209a6b-d93f-45a5-b1df-3b7244e3a624', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1084.387251] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Creating folder: Project (5ef38e57bbc04a7ebf167286cdffd07c). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1084.388563] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23ab4f41-2814-44e6-bd4c-626070bc0d3a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.399940] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Created folder: Project (5ef38e57bbc04a7ebf167286cdffd07c) in parent group-v295764. [ 1084.400174] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Creating folder: Instances. Parent ref: group-v295943. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1084.400420] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-83a8f00f-5dab-4331-8834-08f45d8c5f0f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.411201] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Created folder: Instances in parent group-v295943. [ 1084.411451] env[61978]: DEBUG oslo.service.loopingcall [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1084.414596] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1084.414846] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2fddeb61-54b9-4183-963a-d46770f2bcf1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.435790] env[61978]: DEBUG oslo_vmware.api [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395325, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.437113] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1084.437113] env[61978]: value = "task-1395330" [ 1084.437113] env[61978]: _type = "Task" [ 1084.437113] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.444722] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395330, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.505071] env[61978]: DEBUG nova.compute.manager [req-c19f3fae-7e0c-4bf2-afb3-57f23343b923 req-ffcbe668-4546-45be-962d-ec9bc7d3aafc service nova] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Received event network-vif-plugged-7c209a6b-d93f-45a5-b1df-3b7244e3a624 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1084.505560] env[61978]: DEBUG oslo_concurrency.lockutils [req-c19f3fae-7e0c-4bf2-afb3-57f23343b923 req-ffcbe668-4546-45be-962d-ec9bc7d3aafc service nova] Acquiring lock "fdd0c16d-b0f8-4f81-9069-34d11f273acb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1084.505830] env[61978]: DEBUG oslo_concurrency.lockutils [req-c19f3fae-7e0c-4bf2-afb3-57f23343b923 req-ffcbe668-4546-45be-962d-ec9bc7d3aafc service nova] Lock "fdd0c16d-b0f8-4f81-9069-34d11f273acb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1084.506059] env[61978]: DEBUG oslo_concurrency.lockutils [req-c19f3fae-7e0c-4bf2-afb3-57f23343b923 req-ffcbe668-4546-45be-962d-ec9bc7d3aafc service nova] Lock "fdd0c16d-b0f8-4f81-9069-34d11f273acb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.506367] env[61978]: DEBUG nova.compute.manager [req-c19f3fae-7e0c-4bf2-afb3-57f23343b923 req-ffcbe668-4546-45be-962d-ec9bc7d3aafc service nova] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] No waiting events found dispatching network-vif-plugged-7c209a6b-d93f-45a5-b1df-3b7244e3a624 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1084.506747] env[61978]: WARNING nova.compute.manager [req-c19f3fae-7e0c-4bf2-afb3-57f23343b923 
req-ffcbe668-4546-45be-962d-ec9bc7d3aafc service nova] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Received unexpected event network-vif-plugged-7c209a6b-d93f-45a5-b1df-3b7244e3a624 for instance with vm_state building and task_state spawning. [ 1084.506835] env[61978]: DEBUG nova.compute.manager [req-c19f3fae-7e0c-4bf2-afb3-57f23343b923 req-ffcbe668-4546-45be-962d-ec9bc7d3aafc service nova] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Received event network-changed-7c209a6b-d93f-45a5-b1df-3b7244e3a624 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1084.507083] env[61978]: DEBUG nova.compute.manager [req-c19f3fae-7e0c-4bf2-afb3-57f23343b923 req-ffcbe668-4546-45be-962d-ec9bc7d3aafc service nova] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Refreshing instance network info cache due to event network-changed-7c209a6b-d93f-45a5-b1df-3b7244e3a624. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1084.507368] env[61978]: DEBUG oslo_concurrency.lockutils [req-c19f3fae-7e0c-4bf2-afb3-57f23343b923 req-ffcbe668-4546-45be-962d-ec9bc7d3aafc service nova] Acquiring lock "refresh_cache-fdd0c16d-b0f8-4f81-9069-34d11f273acb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1084.507564] env[61978]: DEBUG oslo_concurrency.lockutils [req-c19f3fae-7e0c-4bf2-afb3-57f23343b923 req-ffcbe668-4546-45be-962d-ec9bc7d3aafc service nova] Acquired lock "refresh_cache-fdd0c16d-b0f8-4f81-9069-34d11f273acb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.507890] env[61978]: DEBUG nova.network.neutron [req-c19f3fae-7e0c-4bf2-afb3-57f23343b923 req-ffcbe668-4546-45be-962d-ec9bc7d3aafc service nova] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Refreshing network info cache for port 7c209a6b-d93f-45a5-b1df-3b7244e3a624 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1084.583854] env[61978]: INFO nova.compute.manager [-] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Took 1.52 seconds to deallocate network for instance. [ 1084.770536] env[61978]: DEBUG oslo_vmware.api [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Task: {'id': task-1395327, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142544} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.770881] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1084.771141] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1084.771345] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1084.771545] env[61978]: INFO nova.compute.manager [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1084.771835] env[61978]: DEBUG oslo.service.loopingcall [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1084.772076] env[61978]: DEBUG nova.compute.manager [-] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1084.772210] env[61978]: DEBUG nova.network.neutron [-] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1084.814886] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Releasing lock "refresh_cache-f4034944-3a9d-4e14-a545-0bf574465e0b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1084.815797] env[61978]: DEBUG nova.compute.manager [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1084.816156] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1084.820638] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f410a610-3d67-43be-b86f-79cb4d56adb6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.829173] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1084.829489] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22a7e0d6-47b8-44ba-a8f0-92e6a9d74853 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.838382] env[61978]: DEBUG oslo_vmware.api [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1084.838382] env[61978]: value = "task-1395331" [ 1084.838382] env[61978]: _type = "Task" [ 1084.838382] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.857386] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c9c48ed6-4f25-4013-b92e-56626d90c290 tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "f930ab49-c215-4b2e-92b1-21c0d52a70eb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.828s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.861377] env[61978]: DEBUG oslo_vmware.api [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395331, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.865399] env[61978]: DEBUG oslo_concurrency.lockutils [None req-234e19df-eea8-44c7-af14-90e745d98041 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "7e71c8de-1f94-4161-8ad8-a67792c5ce24" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.295s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.921488] env[61978]: DEBUG oslo_vmware.api [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395325, 'name': CreateSnapshot_Task, 'duration_secs': 0.675257} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.924216] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Created Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1084.925405] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6663341-547a-46aa-afc7-22207ebedd57 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.948104] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395330, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.046591] env[61978]: DEBUG oslo_concurrency.lockutils [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "c17c986e-c008-4414-8dd1-4ea836458048-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1085.046904] env[61978]: DEBUG oslo_concurrency.lockutils [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "c17c986e-c008-4414-8dd1-4ea836458048-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1085.047138] env[61978]: DEBUG oslo_concurrency.lockutils [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "c17c986e-c008-4414-8dd1-4ea836458048-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.092548] env[61978]: DEBUG oslo_concurrency.lockutils [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1085.276840] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0922929d-2a70-45b2-a051-6a935290f44e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.284340] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b79a109e-9292-4fac-9b24-2fe282144ac3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.316720] env[61978]: DEBUG nova.network.neutron [req-c19f3fae-7e0c-4bf2-afb3-57f23343b923 req-ffcbe668-4546-45be-962d-ec9bc7d3aafc service nova] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Updated VIF entry in instance network 
info cache for port 7c209a6b-d93f-45a5-b1df-3b7244e3a624. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1085.317155] env[61978]: DEBUG nova.network.neutron [req-c19f3fae-7e0c-4bf2-afb3-57f23343b923 req-ffcbe668-4546-45be-962d-ec9bc7d3aafc service nova] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Updating instance_info_cache with network_info: [{"id": "7c209a6b-d93f-45a5-b1df-3b7244e3a624", "address": "fa:16:3e:77:e4:92", "network": {"id": "fd9afb25-c8d7-405a-9c8f-e0acc08ca108", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-738004830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ef38e57bbc04a7ebf167286cdffd07c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c209a6b-d9", "ovs_interfaceid": "7c209a6b-d93f-45a5-b1df-3b7244e3a624", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.319180] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c95ea9-3ec4-4b37-b6cc-faf6b4533a59 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.328146] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9090f8-cce3-4397-93c4-45fac57d2420 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.342451] env[61978]: DEBUG nova.compute.provider_tree [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1085.352763] env[61978]: DEBUG oslo_vmware.api [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395331, 'name': PowerOffVM_Task, 'duration_secs': 0.31955} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.353045] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1085.353283] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1085.353590] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7868a459-6241-439a-ae5a-e9433b1b48ab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.377556] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1085.377806] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1085.377992] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Deleting the datastore file [datastore2] f4034944-3a9d-4e14-a545-0bf574465e0b {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1085.378287] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4ede365b-5c71-4308-a7be-7a042e14649a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.384482] env[61978]: DEBUG oslo_vmware.api [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1085.384482] env[61978]: value = "task-1395333" [ 1085.384482] env[61978]: _type = "Task" [ 1085.384482] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.392661] env[61978]: DEBUG oslo_vmware.api [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395333, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.449491] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Creating linked-clone VM from snapshot {{(pid=61978) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1085.452976] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3c726148-e7e5-4b1b-9350-a1eb01634075 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.463546] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395330, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.464789] env[61978]: DEBUG oslo_vmware.api [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1085.464789] env[61978]: value = "task-1395334" [ 1085.464789] env[61978]: _type = "Task" [ 1085.464789] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.472298] env[61978]: DEBUG oslo_vmware.api [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395334, 'name': CloneVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.680566] env[61978]: DEBUG nova.network.neutron [-] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.823472] env[61978]: DEBUG oslo_concurrency.lockutils [req-c19f3fae-7e0c-4bf2-afb3-57f23343b923 req-ffcbe668-4546-45be-962d-ec9bc7d3aafc service nova] Releasing lock "refresh_cache-fdd0c16d-b0f8-4f81-9069-34d11f273acb" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1085.823804] env[61978]: DEBUG nova.compute.manager [req-c19f3fae-7e0c-4bf2-afb3-57f23343b923 req-ffcbe668-4546-45be-962d-ec9bc7d3aafc service nova] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Received event network-vif-deleted-ebb15ef6-0310-4f67-8247-f09f03d452db {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1085.848638] env[61978]: DEBUG nova.scheduler.client.report [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1085.894334] env[61978]: DEBUG oslo_vmware.api [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 
tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395333, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.095915} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.894643] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1085.894850] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1085.895054] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1085.895255] env[61978]: INFO nova.compute.manager [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1085.895562] env[61978]: DEBUG oslo.service.loopingcall [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1085.895783] env[61978]: DEBUG nova.compute.manager [-] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1085.895880] env[61978]: DEBUG nova.network.neutron [-] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1085.922888] env[61978]: DEBUG nova.network.neutron [-] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1085.951674] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395330, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.974679] env[61978]: DEBUG oslo_vmware.api [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395334, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.101280] env[61978]: DEBUG oslo_concurrency.lockutils [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "refresh_cache-c17c986e-c008-4414-8dd1-4ea836458048" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1086.101280] env[61978]: DEBUG oslo_concurrency.lockutils [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired lock "refresh_cache-c17c986e-c008-4414-8dd1-4ea836458048" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.101425] env[61978]: DEBUG nova.network.neutron [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1086.182900] env[61978]: INFO nova.compute.manager [-] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Took 1.41 seconds to deallocate network for instance. [ 1086.252541] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Acquiring lock "f1001633-e4e5-4de1-8a6b-cf653e43d821" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.252826] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Lock "f1001633-e4e5-4de1-8a6b-cf653e43d821" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.253076] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Acquiring lock "f1001633-e4e5-4de1-8a6b-cf653e43d821-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.253243] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Lock "f1001633-e4e5-4de1-8a6b-cf653e43d821-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.253422] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Lock "f1001633-e4e5-4de1-8a6b-cf653e43d821-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.255553] env[61978]: INFO nova.compute.manager [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Terminating instance [ 1086.258030] env[61978]: DEBUG nova.compute.manager [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1086.258030] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1086.258938] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f7c996-d56a-483c-83cc-dba18cd0ae1f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.267265] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1086.267492] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7d2c517-410f-4407-b5eb-1f569869ab36 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.273578] env[61978]: DEBUG oslo_vmware.api [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Waiting for the task: (returnval){ [ 1086.273578] env[61978]: value = "task-1395335" [ 1086.273578] env[61978]: _type = "Task" [ 1086.273578] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.281036] env[61978]: DEBUG oslo_vmware.api [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Task: {'id': task-1395335, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.354276] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.047s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.356909] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.226s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.359144] env[61978]: INFO nova.compute.claims [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1086.376781] env[61978]: INFO nova.scheduler.client.report [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Deleted allocations for instance cb004a19-0048-4766-af7c-0fbde867f422 [ 1086.424987] env[61978]: DEBUG nova.network.neutron [-] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.453902] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395330, 'name': CreateVM_Task, 'duration_secs': 1.519612} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.454196] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1086.454999] env[61978]: DEBUG oslo_concurrency.lockutils [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1086.455271] env[61978]: DEBUG oslo_concurrency.lockutils [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.455755] env[61978]: DEBUG oslo_concurrency.lockutils [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1086.456167] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "b26a4784-698d-477a-8db7-58156899d231" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.456449] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "b26a4784-698d-477a-8db7-58156899d231" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.456730] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "b26a4784-698d-477a-8db7-58156899d231-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.457017] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "b26a4784-698d-477a-8db7-58156899d231-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.457267] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock 
"b26a4784-698d-477a-8db7-58156899d231-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.458736] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df7761f1-f651-4c55-aefb-89fe4db07df3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.460696] env[61978]: INFO nova.compute.manager [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Terminating instance [ 1086.462893] env[61978]: DEBUG nova.compute.manager [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1086.463179] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1086.464378] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-967f1a5f-050a-4205-94ef-96a2994abbeb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.469028] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Waiting for the task: (returnval){ [ 1086.469028] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]524113d5-e7f9-dff3-4cc3-fe4cccd1fecb" [ 1086.469028] env[61978]: _type = "Task" [ 1086.469028] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.480853] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1086.480853] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8898203f-6e3c-40ae-b2d2-e14606740771 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.489910] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]524113d5-e7f9-dff3-4cc3-fe4cccd1fecb, 'name': SearchDatastore_Task, 'duration_secs': 0.012881} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.490383] env[61978]: DEBUG oslo_vmware.api [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395334, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.491135] env[61978]: DEBUG oslo_concurrency.lockutils [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1086.491603] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1086.491930] env[61978]: DEBUG oslo_concurrency.lockutils [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1086.493116] env[61978]: DEBUG oslo_concurrency.lockutils [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.493116] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1086.493116] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-30c44ebe-7017-4dba-9741-7915208cc6cd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.496174] env[61978]: DEBUG oslo_vmware.api [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 1086.496174] env[61978]: value = "task-1395336" [ 1086.496174] env[61978]: _type = "Task" [ 1086.496174] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.504929] env[61978]: DEBUG oslo_vmware.api [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395336, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.514904] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1086.515182] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1086.516035] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14982365-f056-41ce-b5ad-598ef35550e9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.522664] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Waiting for the task: (returnval){ [ 1086.522664] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e6fade-344f-7788-58cc-89779fd03a55" [ 1086.522664] env[61978]: _type = "Task" [ 1086.522664] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.531654] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e6fade-344f-7788-58cc-89779fd03a55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.583754] env[61978]: DEBUG nova.compute.manager [req-4572aacb-27d2-48b4-a656-b87afb0ab577 req-928da7ee-db9f-4a1d-a13a-edafd25a29f3 service nova] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Received event network-vif-deleted-15ee1476-11da-4794-a070-c4365a572948 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1086.689623] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.786144] env[61978]: DEBUG oslo_vmware.api [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Task: {'id': task-1395335, 'name': PowerOffVM_Task, 'duration_secs': 0.429682} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.786447] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1086.786622] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1086.786881] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a01b5054-94d4-4d01-a656-c2d7c123a19c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.873300] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1086.873300] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1086.873300] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Deleting the datastore file [datastore2] f1001633-e4e5-4de1-8a6b-cf653e43d821 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1086.873728] env[61978]: DEBUG nova.network.neutron [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Updating instance_info_cache with network_info: [{"id": "394a8251-684b-4ddc-ae5c-7ef7ec06b503", "address": "fa:16:3e:5c:ce:8c", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap394a8251-68", "ovs_interfaceid": "394a8251-684b-4ddc-ae5c-7ef7ec06b503", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.875567] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d11b6fc-5348-4234-88dd-5dd4a5626e34 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.886950] env[61978]: DEBUG oslo_vmware.api [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Waiting for the task: (returnval){ [ 1086.886950] env[61978]: value = "task-1395338" [ 1086.886950] env[61978]: _type = "Task" [ 1086.886950] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.887914] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0c5af36a-513c-49b7-ae7b-8ac990146168 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lock "cb004a19-0048-4766-af7c-0fbde867f422" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.901s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.898232] env[61978]: DEBUG oslo_vmware.api [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Task: {'id': task-1395338, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.929271] env[61978]: INFO nova.compute.manager [-] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Took 1.03 seconds to deallocate network for instance. [ 1086.979467] env[61978]: DEBUG oslo_vmware.api [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395334, 'name': CloneVM_Task} progress is 95%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.008494] env[61978]: DEBUG oslo_vmware.api [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395336, 'name': PowerOffVM_Task, 'duration_secs': 0.205867} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.009127] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1087.009288] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1087.009577] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d8a73e2-c02f-4575-bfbc-f4e41feebb97 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.032708] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e6fade-344f-7788-58cc-89779fd03a55, 'name': SearchDatastore_Task, 'duration_secs': 0.012031} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.033521] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ecc0ace-1c67-46ab-9c9d-42f4f43db1f3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.038713] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Waiting for the task: (returnval){ [ 1087.038713] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]523a383f-804f-e92f-d03e-c55dfb76be1a" [ 1087.038713] env[61978]: _type = "Task" [ 1087.038713] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.046851] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523a383f-804f-e92f-d03e-c55dfb76be1a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.083711] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1087.083953] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1087.084171] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Deleting the datastore file [datastore1] b26a4784-698d-477a-8db7-58156899d231 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1087.084465] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b10b959f-dc91-4aab-8910-1bde6701bb0a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.090605] env[61978]: DEBUG oslo_vmware.api [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for the task: (returnval){ [ 1087.090605] env[61978]: value = "task-1395340" [ 1087.090605] env[61978]: _type = "Task" [ 1087.090605] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.099194] env[61978]: DEBUG oslo_vmware.api [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395340, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.204893] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.205140] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1087.221451] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquiring lock "b356fc81-f857-4416-8eb0-28c66d137967" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.221709] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lock "b356fc81-f857-4416-8eb0-28c66d137967" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1087.221919] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquiring lock "b356fc81-f857-4416-8eb0-28c66d137967-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.222130] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lock "b356fc81-f857-4416-8eb0-28c66d137967-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1087.222338] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lock "b356fc81-f857-4416-8eb0-28c66d137967-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1087.224385] env[61978]: INFO nova.compute.manager [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 
tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Terminating instance [ 1087.226151] env[61978]: DEBUG nova.compute.manager [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1087.226372] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1087.227236] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37987594-57b9-49be-ae25-44491f9bbfbf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.235100] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1087.235340] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-23704278-5722-4e16-a3ca-3c7efadb98c1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.241402] env[61978]: DEBUG oslo_vmware.api [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 1087.241402] env[61978]: value = "task-1395341" [ 1087.241402] env[61978]: _type = "Task" [ 1087.241402] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.249293] env[61978]: DEBUG oslo_vmware.api [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395341, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.376152] env[61978]: DEBUG oslo_concurrency.lockutils [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lock "refresh_cache-c17c986e-c008-4414-8dd1-4ea836458048" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1087.402890] env[61978]: DEBUG oslo_vmware.api [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Task: {'id': task-1395338, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.264272} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.403332] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1087.403671] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1087.404117] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1087.404344] env[61978]: INFO nova.compute.manager [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1087.404867] env[61978]: DEBUG oslo.service.loopingcall [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1087.404976] env[61978]: DEBUG nova.compute.manager [-] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1087.405118] env[61978]: DEBUG nova.network.neutron [-] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1087.437339] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.482374] env[61978]: DEBUG oslo_vmware.api [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395334, 'name': CloneVM_Task, 'duration_secs': 1.711652} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.482374] env[61978]: INFO nova.virt.vmwareapi.vmops [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Created linked-clone VM from snapshot [ 1087.482374] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6235e436-f76a-4742-af3d-11096430cb6f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.490985] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Uploading image 2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1087.527572] env[61978]: DEBUG oslo_vmware.rw_handles [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1087.527572] env[61978]: value = "vm-295947" [ 1087.527572] env[61978]: _type = "VirtualMachine" [ 1087.527572] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1087.527776] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a90aa768-52d3-4d28-8f1a-04c327965ed4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.534461] env[61978]: DEBUG oslo_vmware.rw_handles [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lease: (returnval){ [ 1087.534461] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ba314b-e688-d03b-1d02-318d2c2871a3" [ 1087.534461] env[61978]: _type = "HttpNfcLease" [ 1087.534461] env[61978]: } obtained for exporting VM: (result){ [ 1087.534461] env[61978]: value = "vm-295947" [ 1087.534461] env[61978]: _type = "VirtualMachine" [ 1087.534461] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1087.534857] env[61978]: DEBUG oslo_vmware.api [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the lease: (returnval){ [ 1087.534857] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ba314b-e688-d03b-1d02-318d2c2871a3" [ 1087.534857] env[61978]: _type = "HttpNfcLease" [ 1087.534857] env[61978]: } to be ready. {{(pid=61978) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1087.547228] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1087.547228] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ba314b-e688-d03b-1d02-318d2c2871a3" [ 1087.547228] env[61978]: _type = "HttpNfcLease" [ 1087.547228] env[61978]: } is initializing. 
{{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1087.552692] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523a383f-804f-e92f-d03e-c55dfb76be1a, 'name': SearchDatastore_Task, 'duration_secs': 0.014325} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.553580] env[61978]: DEBUG oslo_concurrency.lockutils [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1087.553865] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] fdd0c16d-b0f8-4f81-9069-34d11f273acb/fdd0c16d-b0f8-4f81-9069-34d11f273acb.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1087.554330] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5af5fc6d-f04d-4c2a-bbc1-6dad7c6d898a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.561236] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Waiting for the task: (returnval){ [ 1087.561236] env[61978]: value = "task-1395343" [ 1087.561236] env[61978]: _type = "Task" [ 1087.561236] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.575014] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': task-1395343, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.604985] env[61978]: DEBUG oslo_vmware.api [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Task: {'id': task-1395340, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.337384} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.604985] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1087.605252] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1087.605491] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1087.605676] env[61978]: INFO nova.compute.manager [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] [instance: b26a4784-698d-477a-8db7-58156899d231] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1087.605923] env[61978]: DEBUG oslo.service.loopingcall [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1087.606127] env[61978]: DEBUG nova.compute.manager [-] [instance: b26a4784-698d-477a-8db7-58156899d231] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1087.606763] env[61978]: DEBUG nova.network.neutron [-] [instance: b26a4784-698d-477a-8db7-58156899d231] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1087.707122] env[61978]: DEBUG nova.compute.manager [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1087.753530] env[61978]: DEBUG oslo_vmware.api [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395341, 'name': PowerOffVM_Task, 'duration_secs': 0.2799} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.753848] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1087.754239] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1087.755308] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a98b6b7d-9854-4245-aa53-d8254942f7b5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.804040] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e51e01-569d-4280-a44b-5918b92bbb8a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.810557] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d6b37ed-6287-4f44-ae47-dc3599ac84be {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.840905] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe6771e-7135-451d-8bb6-6efd831a0902 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.852144] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d490af-aca4-4364-950f-9a4818f7b469 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.870119] env[61978]: DEBUG nova.compute.provider_tree [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1087.874146] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1087.874146] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Deleting 
contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1087.874472] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Deleting the datastore file [datastore2] b356fc81-f857-4416-8eb0-28c66d137967 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1087.875112] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62c040a0-cdd6-4ab6-a9b8-fb78ef07d24e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.889504] env[61978]: DEBUG oslo_vmware.api [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for the task: (returnval){ [ 1087.889504] env[61978]: value = "task-1395345" [ 1087.889504] env[61978]: _type = "Task" [ 1087.889504] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.902041] env[61978]: DEBUG oslo_vmware.api [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395345, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.903912] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053a665e-614a-4fd7-adc3-70a05fe7d8db {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.927171] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e13f03e-22bc-46df-93fc-5298a03669e8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.934871] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Updating instance 'c17c986e-c008-4414-8dd1-4ea836458048' progress to 83 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1088.047032] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1088.047032] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ba314b-e688-d03b-1d02-318d2c2871a3" [ 1088.047032] env[61978]: _type = "HttpNfcLease" [ 1088.047032] env[61978]: } is ready. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1088.047780] env[61978]: DEBUG oslo_vmware.rw_handles [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1088.047780] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ba314b-e688-d03b-1d02-318d2c2871a3" [ 1088.047780] env[61978]: _type = "HttpNfcLease" [ 1088.047780] env[61978]: }. 
{{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1088.048148] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6722eb71-1f8a-4726-af96-374d34df4d0e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.058030] env[61978]: DEBUG oslo_vmware.rw_handles [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e2e1b3-ffa4-85ec-cbe0-b11414c2cb6b/disk-0.vmdk from lease info. {{(pid=61978) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1088.058030] env[61978]: DEBUG oslo_vmware.rw_handles [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e2e1b3-ffa4-85ec-cbe0-b11414c2cb6b/disk-0.vmdk for reading. {{(pid=61978) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1088.133034] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': task-1395343, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.183117] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f4138d3a-b9e2-452b-a740-09e88144e051 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.234730] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1088.396230] env[61978]: ERROR nova.scheduler.client.report [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [req-2a24619a-c103-45da-9360-342a4f86e3c1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 44209228-3464-48ae-bc40-83eccd44b0cf. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2a24619a-c103-45da-9360-342a4f86e3c1"}]} [ 1088.406914] env[61978]: DEBUG oslo_vmware.api [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Task: {'id': task-1395345, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.415327} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.406914] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1088.406914] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1088.407211] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1088.407295] env[61978]: INFO nova.compute.manager [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1088.407644] env[61978]: DEBUG oslo.service.loopingcall [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1088.408297] env[61978]: DEBUG nova.compute.manager [-] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1088.408400] env[61978]: DEBUG nova.network.neutron [-] [instance: b356fc81-f857-4416-8eb0-28c66d137967] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1088.414505] env[61978]: DEBUG nova.scheduler.client.report [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Refreshing inventories for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1088.437016] env[61978]: DEBUG nova.scheduler.client.report [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Updating ProviderTree inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1088.437298] env[61978]: DEBUG nova.compute.provider_tree [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1088.440937] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-84b9fb5e-33f6-40b9-808e-d9c89604571d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Updating instance 'c17c986e-c008-4414-8dd1-4ea836458048' progress to 100 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1088.465060] env[61978]: DEBUG nova.scheduler.client.report [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Refreshing aggregate associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, aggregates: None {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1088.500930] env[61978]: DEBUG nova.scheduler.client.report [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Refreshing trait associations for resource provider 
44209228-3464-48ae-bc40-83eccd44b0cf, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1088.632275] env[61978]: DEBUG nova.network.neutron [-] [instance: b26a4784-698d-477a-8db7-58156899d231] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.637058] env[61978]: DEBUG nova.network.neutron [-] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.642627] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': task-1395343, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.612507} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.643434] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] fdd0c16d-b0f8-4f81-9069-34d11f273acb/fdd0c16d-b0f8-4f81-9069-34d11f273acb.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1088.643957] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1088.644379] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f42ef718-be18-4a04-b8cf-f8d1239c2085 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.658538] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Waiting for the task: (returnval){ [ 1088.658538] env[61978]: value = "task-1395346" [ 1088.658538] env[61978]: _type = "Task" [ 1088.658538] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.669544] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': task-1395346, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.790992] env[61978]: DEBUG nova.compute.manager [req-7b55beb1-17ed-4d18-aeb6-8a256aab6472 req-48325711-5af1-47a2-af21-548d85b23729 service nova] [instance: b26a4784-698d-477a-8db7-58156899d231] Received event network-vif-deleted-3a886f4f-5f7c-4f97-8f00-2555aebe9856 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1088.791466] env[61978]: DEBUG nova.compute.manager [req-7b55beb1-17ed-4d18-aeb6-8a256aab6472 req-48325711-5af1-47a2-af21-548d85b23729 service nova] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Received event network-vif-deleted-631d15db-8176-407e-8ab9-1b7e5a095d9a {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1088.993776] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65850755-40de-4bfa-a853-6730cee4c62a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.003261] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f1f1e1-776b-4b49-b68b-39b2adce1b65 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.035544] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ecf0d4-0720-455a-a03e-e9b6a7e2b7ba {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.048326] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57410368-fa2e-44fc-8708-995c445e4186 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.060335] env[61978]: DEBUG nova.compute.provider_tree [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1089.135032] env[61978]: INFO nova.compute.manager [-] [instance: b26a4784-698d-477a-8db7-58156899d231] Took 1.53 seconds to deallocate network for instance. [ 1089.144700] env[61978]: INFO nova.compute.manager [-] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Took 1.74 seconds to deallocate network for instance. [ 1089.169719] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': task-1395346, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071711} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.170061] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1089.170919] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1cd5ff-aaad-4364-8996-648dd7c9dd76 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.196050] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] fdd0c16d-b0f8-4f81-9069-34d11f273acb/fdd0c16d-b0f8-4f81-9069-34d11f273acb.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1089.198540] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a1c743c-22d3-4679-9760-0ccf094492e3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.217779] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Waiting for the task: (returnval){ [ 1089.217779] env[61978]: value = "task-1395347" [ 1089.217779] env[61978]: _type = "Task" [ 1089.217779] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.228284] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': task-1395347, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.249267] env[61978]: DEBUG nova.network.neutron [-] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.593038] env[61978]: DEBUG nova.scheduler.client.report [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Updated inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf with generation 86 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1089.593298] env[61978]: DEBUG nova.compute.provider_tree [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Updating resource provider 44209228-3464-48ae-bc40-83eccd44b0cf generation from 86 to 87 during operation: update_inventory {{(pid=61978) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1089.593597] env[61978]: DEBUG nova.compute.provider_tree [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1089.641976] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.653693] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.730141] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': task-1395347, 'name': ReconfigVM_Task, 'duration_secs': 0.353383} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.730547] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Reconfigured VM instance instance-00000042 to attach disk [datastore2] fdd0c16d-b0f8-4f81-9069-34d11f273acb/fdd0c16d-b0f8-4f81-9069-34d11f273acb.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1089.731213] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-488ca957-5aad-4ab7-998f-d85e2faa5205 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.738274] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Waiting for the task: (returnval){ [ 1089.738274] env[61978]: value = "task-1395348" [ 1089.738274] env[61978]: _type = "Task" [ 1089.738274] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.746637] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': task-1395348, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.752376] env[61978]: INFO nova.compute.manager [-] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Took 1.34 seconds to deallocate network for instance. [ 1090.099615] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.743s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.100942] env[61978]: DEBUG nova.compute.manager [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1090.102900] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.147s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.105371] env[61978]: INFO nova.compute.claims [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1090.248767] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': task-1395348, 'name': Rename_Task, 'duration_secs': 0.148403} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.248767] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1090.249055] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ba65a39-9478-4927-99b4-0442ba281620 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.256222] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Waiting for the task: (returnval){ [ 1090.256222] env[61978]: value = "task-1395349" [ 1090.256222] env[61978]: _type = "Task" [ 1090.256222] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.260527] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.265587] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': task-1395349, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.610516] env[61978]: DEBUG nova.compute.utils [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1090.614669] env[61978]: DEBUG nova.compute.manager [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1090.614850] env[61978]: DEBUG nova.network.neutron [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1090.657079] env[61978]: DEBUG nova.policy [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1513c7acb97e4b208c73ccde70309ad7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '198eab494c0a4e0eb83bae5732df9c78', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1090.684593] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "c17c986e-c008-4414-8dd1-4ea836458048" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.684877] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "c17c986e-c008-4414-8dd1-4ea836458048" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.685089] env[61978]: DEBUG nova.compute.manager [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Going to confirm migration 3 {{(pid=61978) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1090.766820] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': task-1395349, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.816243] env[61978]: DEBUG nova.compute.manager [req-5ebf84c6-52b9-41b2-8645-9cb2aaf2ad59 req-1aad07d1-25e2-4562-be19-bf7aa0179edb service nova] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Received event network-vif-deleted-2c83f5d8-5390-4c18-a494-54cb1cee93e4 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1090.943386] env[61978]: DEBUG nova.network.neutron [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Successfully created port: bba3eeec-259f-4ea3-b0f6-e509a29d33f4 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1091.118561] env[61978]: DEBUG nova.compute.manager [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1091.267978] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': task-1395349, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.288620] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "refresh_cache-c17c986e-c008-4414-8dd1-4ea836458048" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1091.288814] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired lock "refresh_cache-c17c986e-c008-4414-8dd1-4ea836458048" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.288994] env[61978]: DEBUG nova.network.neutron [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1091.289214] env[61978]: DEBUG nova.objects.instance [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lazy-loading 'info_cache' on Instance uuid c17c986e-c008-4414-8dd1-4ea836458048 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1091.547129] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1787c166-8445-44e4-828c-35b9b73fb075 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.554691] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c41cf37d-3a79-43f7-bd4a-aa4d1bfed553 {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.586944] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-313ad986-f8c8-4aa8-9b57-07a60a22a1d0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.594354] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc918a37-4d27-480b-b36f-7a47e6e0f449 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.608196] env[61978]: DEBUG nova.compute.provider_tree [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1091.725292] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.725570] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.725808] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.726040] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.726221] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.728449] env[61978]: INFO nova.compute.manager [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Terminating 
instance [ 1091.730206] env[61978]: DEBUG nova.compute.manager [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1091.730407] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1091.731249] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c164504-12d2-4b7a-a3ff-705dad32782f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.739193] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1091.739455] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78a0c0c3-cef1-40a7-9d86-42775fdc50ce {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.746076] env[61978]: DEBUG oslo_vmware.api [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1091.746076] env[61978]: value = "task-1395350" [ 1091.746076] env[61978]: _type = "Task" [ 1091.746076] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.755775] env[61978]: DEBUG oslo_vmware.api [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395350, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.767749] env[61978]: DEBUG oslo_vmware.api [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': task-1395349, 'name': PowerOnVM_Task, 'duration_secs': 1.122667} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.767930] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1091.768011] env[61978]: INFO nova.compute.manager [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Took 9.75 seconds to spawn the instance on the hypervisor. 
[ 1091.768256] env[61978]: DEBUG nova.compute.manager [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1091.769134] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e56d4f1-7f32-4029-990e-1ab114e9be6a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.111861] env[61978]: DEBUG nova.scheduler.client.report [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1092.132149] env[61978]: DEBUG nova.compute.manager [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1092.155635] env[61978]: DEBUG nova.virt.hardware [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1092.155951] env[61978]: DEBUG nova.virt.hardware [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1092.156946] env[61978]: DEBUG nova.virt.hardware [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1092.156946] env[61978]: DEBUG nova.virt.hardware [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a 
tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1092.156946] env[61978]: DEBUG nova.virt.hardware [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1092.156946] env[61978]: DEBUG nova.virt.hardware [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1092.156946] env[61978]: DEBUG nova.virt.hardware [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1092.157173] env[61978]: DEBUG nova.virt.hardware [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1092.157214] env[61978]: DEBUG nova.virt.hardware [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1092.157364] env[61978]: DEBUG nova.virt.hardware [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1092.157547] env[61978]: DEBUG nova.virt.hardware [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1092.158441] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25da36ee-b0a5-4f70-ad48-7bdc42b4a20b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.166866] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba9a95a-21ab-4e85-9550-d946f0688c8f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.259503] env[61978]: DEBUG oslo_vmware.api [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395350, 'name': PowerOffVM_Task, 'duration_secs': 0.370809} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.259503] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1092.259503] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1092.259503] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1f971381-f852-4199-af55-642947a8158e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.287645] env[61978]: INFO nova.compute.manager [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Took 40.11 seconds to build instance. [ 1092.617885] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.515s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1092.618462] env[61978]: DEBUG nova.compute.manager [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1092.626574] env[61978]: DEBUG oslo_concurrency.lockutils [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.692s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.626874] env[61978]: DEBUG nova.objects.instance [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lazy-loading 'resources' on Instance uuid f3a9f204-e4ed-49f1-85ef-8cea7377cf89 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1092.672565] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1092.672812] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1092.673009] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Deleting the datastore file [datastore2] f3c837fb-be7e-40a6-aae4-7f213c62ab2c {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1092.673293] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17649041-b74c-44f6-bb47-f54c4ae90866 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.680825] env[61978]: DEBUG oslo_vmware.api [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1092.680825] env[61978]: value = "task-1395352" [ 1092.680825] env[61978]: _type = "Task" [ 1092.680825] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.687324] env[61978]: DEBUG nova.network.neutron [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Updating instance_info_cache with network_info: [{"id": "394a8251-684b-4ddc-ae5c-7ef7ec06b503", "address": "fa:16:3e:5c:ce:8c", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap394a8251-68", "ovs_interfaceid": "394a8251-684b-4ddc-ae5c-7ef7ec06b503", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.691886] env[61978]: DEBUG oslo_vmware.api [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395352, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.728298] env[61978]: DEBUG nova.network.neutron [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Successfully updated port: bba3eeec-259f-4ea3-b0f6-e509a29d33f4 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1092.791255] env[61978]: DEBUG oslo_concurrency.lockutils [None req-739daba9-b539-484a-810f-c9a9c83e2fa4 tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Lock "fdd0c16d-b0f8-4f81-9069-34d11f273acb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.621s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1092.869664] env[61978]: DEBUG nova.compute.manager [req-60fa299a-b7db-46fa-8722-5d35489c8e75 req-487bdb2e-a2d8-4f6b-85ce-20737fa5e831 service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Received event network-vif-plugged-bba3eeec-259f-4ea3-b0f6-e509a29d33f4 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1092.870117] env[61978]: DEBUG oslo_concurrency.lockutils [req-60fa299a-b7db-46fa-8722-5d35489c8e75 req-487bdb2e-a2d8-4f6b-85ce-20737fa5e831 service nova] Acquiring lock "17c56c1c-9992-4559-ad23-c68909ae6792-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.870194] env[61978]: DEBUG oslo_concurrency.lockutils [req-60fa299a-b7db-46fa-8722-5d35489c8e75 req-487bdb2e-a2d8-4f6b-85ce-20737fa5e831 service nova] Lock "17c56c1c-9992-4559-ad23-c68909ae6792-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.870342] env[61978]: DEBUG oslo_concurrency.lockutils [req-60fa299a-b7db-46fa-8722-5d35489c8e75 req-487bdb2e-a2d8-4f6b-85ce-20737fa5e831 service nova] Lock "17c56c1c-9992-4559-ad23-c68909ae6792-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1092.870522] env[61978]: DEBUG nova.compute.manager [req-60fa299a-b7db-46fa-8722-5d35489c8e75 req-487bdb2e-a2d8-4f6b-85ce-20737fa5e831 service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] No waiting events found dispatching network-vif-plugged-bba3eeec-259f-4ea3-b0f6-e509a29d33f4 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1092.870693] env[61978]: WARNING nova.compute.manager [req-60fa299a-b7db-46fa-8722-5d35489c8e75 req-487bdb2e-a2d8-4f6b-85ce-20737fa5e831 service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Received unexpected event network-vif-plugged-bba3eeec-259f-4ea3-b0f6-e509a29d33f4 for instance with vm_state building and task_state spawning. 
[ 1092.870902] env[61978]: DEBUG nova.compute.manager [req-60fa299a-b7db-46fa-8722-5d35489c8e75 req-487bdb2e-a2d8-4f6b-85ce-20737fa5e831 service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Received event network-changed-bba3eeec-259f-4ea3-b0f6-e509a29d33f4 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1092.871102] env[61978]: DEBUG nova.compute.manager [req-60fa299a-b7db-46fa-8722-5d35489c8e75 req-487bdb2e-a2d8-4f6b-85ce-20737fa5e831 service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Refreshing instance network info cache due to event network-changed-bba3eeec-259f-4ea3-b0f6-e509a29d33f4. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1092.871307] env[61978]: DEBUG oslo_concurrency.lockutils [req-60fa299a-b7db-46fa-8722-5d35489c8e75 req-487bdb2e-a2d8-4f6b-85ce-20737fa5e831 service nova] Acquiring lock "refresh_cache-17c56c1c-9992-4559-ad23-c68909ae6792" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1092.871457] env[61978]: DEBUG oslo_concurrency.lockutils [req-60fa299a-b7db-46fa-8722-5d35489c8e75 req-487bdb2e-a2d8-4f6b-85ce-20737fa5e831 service nova] Acquired lock "refresh_cache-17c56c1c-9992-4559-ad23-c68909ae6792" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.871605] env[61978]: DEBUG nova.network.neutron [req-60fa299a-b7db-46fa-8722-5d35489c8e75 req-487bdb2e-a2d8-4f6b-85ce-20737fa5e831 service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Refreshing network info cache for port bba3eeec-259f-4ea3-b0f6-e509a29d33f4 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1093.134709] env[61978]: DEBUG nova.compute.utils [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1093.138545] env[61978]: DEBUG nova.compute.manager [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1093.139054] env[61978]: DEBUG nova.network.neutron [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1093.191282] env[61978]: DEBUG oslo_vmware.api [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395352, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.288963} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.193888] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1093.194296] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1093.194511] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1093.194693] env[61978]: INFO nova.compute.manager [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Took 1.46 seconds to destroy the instance on the hypervisor. [ 1093.194978] env[61978]: DEBUG oslo.service.loopingcall [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1093.195472] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lock "refresh_cache-c17c986e-c008-4414-8dd1-4ea836458048" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1093.195704] env[61978]: DEBUG nova.objects.instance [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lazy-loading 'migration_context' on Instance uuid c17c986e-c008-4414-8dd1-4ea836458048 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1093.196905] env[61978]: DEBUG nova.compute.manager [-] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1093.197014] env[61978]: DEBUG nova.network.neutron [-] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1093.200303] env[61978]: DEBUG nova.policy [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '72c335696cee4638967757e4f4cdfe59', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3c3fe7c7f560427db0f814a2c67bb527', 'project_domain_id': 'default', 'roles': 
['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1093.232465] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "refresh_cache-17c56c1c-9992-4559-ad23-c68909ae6792" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.421124] env[61978]: DEBUG nova.network.neutron [req-60fa299a-b7db-46fa-8722-5d35489c8e75 req-487bdb2e-a2d8-4f6b-85ce-20737fa5e831 service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1093.568646] env[61978]: DEBUG nova.network.neutron [req-60fa299a-b7db-46fa-8722-5d35489c8e75 req-487bdb2e-a2d8-4f6b-85ce-20737fa5e831 service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.615087] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801619e5-b10c-4b2d-abbb-5b3269da202e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.623458] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0648eac8-30ea-4bf5-954c-f93bf76b7cd8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.657490] env[61978]: DEBUG nova.compute.manager [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1093.662917] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b12839b-5ec4-47ca-974d-79207da3e0d4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.671285] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b291aeff-3f9f-49ce-8ac1-46691ee1c8e9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.685469] env[61978]: DEBUG nova.compute.provider_tree [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1093.700177] env[61978]: DEBUG nova.objects.base [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1093.701246] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36fa9041-93f1-481f-bfdc-2a6a901b4e11 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.728603] env[61978]: DEBUG oslo_concurrency.lockutils [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Acquiring lock "fdd0c16d-b0f8-4f81-9069-34d11f273acb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.728861] env[61978]: DEBUG oslo_concurrency.lockutils [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Lock "fdd0c16d-b0f8-4f81-9069-34d11f273acb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.729187] env[61978]: DEBUG oslo_concurrency.lockutils [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Acquiring lock "fdd0c16d-b0f8-4f81-9069-34d11f273acb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.729298] env[61978]: DEBUG oslo_concurrency.lockutils [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Lock "fdd0c16d-b0f8-4f81-9069-34d11f273acb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.729458] env[61978]: DEBUG oslo_concurrency.lockutils [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed 
tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Lock "fdd0c16d-b0f8-4f81-9069-34d11f273acb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.731026] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fafb1a2f-5726-4e13-8cb3-be64cfa9034c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.734242] env[61978]: INFO nova.compute.manager [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Terminating instance [ 1093.736749] env[61978]: DEBUG nova.compute.manager [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1093.736985] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1093.737871] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9f07bb-3c5d-43b8-b76a-c38b2c8fb603 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.742830] env[61978]: DEBUG oslo_vmware.api [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1093.742830] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52934fcf-f674-4922-2924-11b9ba27a14b" [ 1093.742830] env[61978]: _type = "Task" [ 1093.742830] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.748793] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1093.749429] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30562d07-cbdb-465e-9b9e-84c9608e5771 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.754858] env[61978]: DEBUG oslo_vmware.api [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52934fcf-f674-4922-2924-11b9ba27a14b, 'name': SearchDatastore_Task, 'duration_secs': 0.011509} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.755633] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.759962] env[61978]: DEBUG oslo_vmware.api [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Waiting for the task: (returnval){ [ 1093.759962] env[61978]: value = "task-1395353" [ 1093.759962] env[61978]: _type = "Task" [ 1093.759962] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.769158] env[61978]: DEBUG oslo_vmware.api [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': task-1395353, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.876042] env[61978]: DEBUG nova.network.neutron [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Successfully created port: e8b9a7d5-25ce-419f-b3e9-9179be86e340 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1094.074167] env[61978]: DEBUG oslo_concurrency.lockutils [req-60fa299a-b7db-46fa-8722-5d35489c8e75 req-487bdb2e-a2d8-4f6b-85ce-20737fa5e831 service nova] Releasing lock "refresh_cache-17c56c1c-9992-4559-ad23-c68909ae6792" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1094.074595] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquired lock "refresh_cache-17c56c1c-9992-4559-ad23-c68909ae6792" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.074757] env[61978]: DEBUG nova.network.neutron [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1094.188992] env[61978]: DEBUG nova.scheduler.client.report [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1094.274600] env[61978]: DEBUG oslo_vmware.api [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': task-1395353, 'name': PowerOffVM_Task, 'duration_secs': 0.260296} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.274600] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1094.274663] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1094.275735] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0a2f0149-8964-4b89-9015-f11eb6d65bcf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.362790] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1094.364857] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1094.364857] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Deleting the datastore file [datastore2] fdd0c16d-b0f8-4f81-9069-34d11f273acb {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1094.364857] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00679890-5854-4203-abc5-79c7ac9c6ea0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.371800] env[61978]: DEBUG oslo_vmware.api [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Waiting for the task: (returnval){ [ 1094.371800] env[61978]: value = "task-1395355" [ 1094.371800] env[61978]: _type = "Task" [ 1094.371800] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.381077] env[61978]: DEBUG oslo_vmware.api [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': task-1395355, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.613892] env[61978]: DEBUG nova.network.neutron [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1094.668430] env[61978]: DEBUG nova.compute.manager [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1094.688487] env[61978]: DEBUG nova.network.neutron [-] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.695110] env[61978]: DEBUG oslo_concurrency.lockutils [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.072s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1094.700605] env[61978]: DEBUG nova.virt.hardware [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1094.700888] env[61978]: DEBUG nova.virt.hardware [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1094.701616] env[61978]: DEBUG nova.virt.hardware [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1094.701864] env[61978]: DEBUG nova.virt.hardware [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1094.702032] env[61978]: DEBUG 
nova.virt.hardware [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1094.702196] env[61978]: DEBUG nova.virt.hardware [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1094.702438] env[61978]: DEBUG nova.virt.hardware [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1094.703341] env[61978]: DEBUG nova.virt.hardware [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1094.703341] env[61978]: DEBUG nova.virt.hardware [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1094.703341] env[61978]: DEBUG nova.virt.hardware [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1094.703341] env[61978]: DEBUG nova.virt.hardware [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1094.703696] env[61978]: DEBUG oslo_concurrency.lockutils [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.748s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1094.704198] env[61978]: DEBUG nova.objects.instance [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lazy-loading 'resources' on Instance uuid 7e6178cf-b7be-46f8-8f8c-8605a09703c7 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1094.709024] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee09ba66-80cd-43a0-aec8-52d95c0f1f79 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.717062] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-694756a6-49b0-4cdb-8788-5e321b2de821 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.738255] env[61978]: INFO nova.scheduler.client.report [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Deleted allocations for instance f3a9f204-e4ed-49f1-85ef-8cea7377cf89 [ 1094.784077] env[61978]: DEBUG nova.network.neutron [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Updating instance_info_cache with network_info: [{"id": "bba3eeec-259f-4ea3-b0f6-e509a29d33f4", "address": "fa:16:3e:1e:db:c1", "network": {"id": "aa63ec3f-fde3-49f2-ab12-71ae85601428", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-445619089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "198eab494c0a4e0eb83bae5732df9c78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "650f7968-4522-4ba5-8304-1b9949951ed7", "external-id": "nsx-vlan-transportzone-568", "segmentation_id": 568, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbba3eeec-25", "ovs_interfaceid": "bba3eeec-259f-4ea3-b0f6-e509a29d33f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.883269] env[61978]: DEBUG oslo_vmware.api [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Task: {'id': task-1395355, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.260406} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.883269] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1094.883269] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1094.883269] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1094.883269] env[61978]: INFO nova.compute.manager [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1094.883497] env[61978]: DEBUG oslo.service.loopingcall [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1094.884539] env[61978]: DEBUG nova.compute.manager [-] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1094.884539] env[61978]: DEBUG nova.network.neutron [-] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1094.899839] env[61978]: DEBUG nova.compute.manager [req-1b3e1cc5-9bf9-422d-9745-564cad1e6383 req-c7ff1409-12ea-4f23-82e1-0b2017f0f37b service nova] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Received event network-vif-deleted-4bd8d0bd-32e6-47a0-9308-f8aebe253aa4 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1095.195305] env[61978]: INFO nova.compute.manager [-] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Took 2.00 seconds to deallocate network for instance. 
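The destroy sequence recorded above (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) is driven through oslo.vmware's session layer: every asynchronous vSphere call returns a Task managed-object reference, and wait_for_task() polls it until it reaches a terminal state, which is what the recurring "Waiting for the task", "_poll_task ... progress is N%" and "completed successfully" records correspond to. A minimal sketch of that pattern follows; the vCenter endpoint and credentials are placeholders, vm_ref/datacenter_ref/ds_path are assumed to come from earlier lookups, and the VMwareAPISession keyword names should be checked against the installed oslo.vmware release.

# Sketch only: placeholder endpoint/credentials, not the environment above.
from oslo_vmware import api as vmware_api
from oslo_vmware import exceptions as vmware_exc


def build_session():
    # api_retry_count / task_poll_interval mirror what the session layer
    # uses when it retries calls and polls task state.
    return vmware_api.VMwareAPISession(
        'vc.example.test', 'user@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)


def destroy_vm_files(session, vm_ref, datacenter_ref, ds_path):
    # Power off: invoke_api() returns a Task moref; wait_for_task() polls it
    # (the PowerOffVM_Task records above) and raises on the task error state.
    session.wait_for_task(
        session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref))
    # UnregisterVM has no task, matching the single
    # "Unregistering/Unregistered the VM" pair in the log.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
    # Delete the instance directory from the datastore, then wait again.
    file_manager = session.vim.service_content.fileManager
    try:
        session.wait_for_task(session.invoke_api(
            session.vim, 'DeleteDatastoreFile_Task', file_manager,
            name=ds_path, datacenter=datacenter_ref))
    except vmware_exc.VimException:
        # A failed DeleteDatastoreFile_Task surfaces here; the caller decides
        # whether a missing file is acceptable.
        raise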
[ 1095.245970] env[61978]: DEBUG oslo_concurrency.lockutils [None req-786da1b8-e987-4511-8b8f-9df63f803db7 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "f3a9f204-e4ed-49f1-85ef-8cea7377cf89" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.455s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.286840] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Releasing lock "refresh_cache-17c56c1c-9992-4559-ad23-c68909ae6792" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1095.287232] env[61978]: DEBUG nova.compute.manager [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Instance network_info: |[{"id": "bba3eeec-259f-4ea3-b0f6-e509a29d33f4", "address": "fa:16:3e:1e:db:c1", "network": {"id": "aa63ec3f-fde3-49f2-ab12-71ae85601428", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-445619089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "198eab494c0a4e0eb83bae5732df9c78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "650f7968-4522-4ba5-8304-1b9949951ed7", "external-id": "nsx-vlan-transportzone-568", "segmentation_id": 568, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbba3eeec-25", "ovs_interfaceid": "bba3eeec-259f-4ea3-b0f6-e509a29d33f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1095.287639] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:db:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '650f7968-4522-4ba5-8304-1b9949951ed7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bba3eeec-259f-4ea3-b0f6-e509a29d33f4', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1095.295492] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Creating folder: Project (198eab494c0a4e0eb83bae5732df9c78). Parent ref: group-v295764. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1095.298430] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fcdff9f2-d089-4804-af20-176e59075b9e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.310703] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Created folder: Project (198eab494c0a4e0eb83bae5732df9c78) in parent group-v295764. [ 1095.311426] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Creating folder: Instances. Parent ref: group-v295948. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1095.311426] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4749e2a6-8ce8-43a8-be7b-4c16e16365a8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.324096] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Created folder: Instances in parent group-v295948. [ 1095.324554] env[61978]: DEBUG oslo.service.loopingcall [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1095.324554] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1095.324819] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0a874aae-6cf0-491c-b232-97d2610d9cde {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.346998] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1095.346998] env[61978]: value = "task-1395358" [ 1095.346998] env[61978]: _type = "Task" [ 1095.346998] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.354992] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395358, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.537292] env[61978]: DEBUG nova.network.neutron [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Successfully updated port: e8b9a7d5-25ce-419f-b3e9-9179be86e340 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1095.634234] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-634d6ac5-074b-406e-a4a9-446ef5b1b743 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.643308] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e52d3603-503d-45b8-a756-1a1b0d447435 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.676637] env[61978]: DEBUG nova.network.neutron [-] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.679047] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1177e9ef-7a7f-4f55-8366-19c14c9aae52 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.687741] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd73170-40b4-4b0a-aeab-d49c8dae7f9c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.703375] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.703958] env[61978]: DEBUG nova.compute.provider_tree [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1095.857836] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395358, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.039892] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquiring lock "refresh_cache-cf6d8815-ed87-4629-9df9-6f406ac2fe6e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.041023] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquired lock "refresh_cache-cf6d8815-ed87-4629-9df9-6f406ac2fe6e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.041023] env[61978]: DEBUG nova.network.neutron [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1096.179627] env[61978]: INFO nova.compute.manager [-] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Took 1.30 seconds to deallocate network for instance. [ 1096.207225] env[61978]: DEBUG nova.scheduler.client.report [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1096.358198] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395358, 'name': CreateVM_Task, 'duration_secs': 0.648707} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.358579] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1096.359430] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.359769] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.360226] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1096.360619] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53075caa-c5cc-4122-bf80-16fa09af7e05 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.367352] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1096.367352] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52411713-5793-fdcd-b334-ed91d033d183" [ 1096.367352] env[61978]: _type = "Task" [ 1096.367352] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.374353] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52411713-5793-fdcd-b334-ed91d033d183, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.572872] env[61978]: DEBUG nova.network.neutron [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Instance cache missing network info. 
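The "Acquiring lock", "Acquired lock ... waited N.NNNs", "Acquired external semaphore" and "Releasing lock" records in this section come from oslo.concurrency's lockutils: the image-cache path and the per-instance refresh_cache-<uuid> string are simply lock names, so concurrent requests touching the same cached image or the same instance serialize on them. A rough sketch of the pattern, with hypothetical helper names (only the lockutils API itself is real):

from oslo_concurrency import lockutils


def ensure_cached_image(datastore, image_id, fetch_image):
    # One lock name per (datastore, image): the first request fetches the
    # image into the cache while later requests block, which is why the log
    # shows some acquisitions waiting 0.000s and others waiting for seconds.
    lock_name = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
    with lockutils.lock(lock_name):
        return fetch_image(datastore, image_id)


def refresh_instance_network_cache(instance_uuid, rebuild_cache):
    # Per-instance lock name, matching the refresh_cache-<uuid> records that
    # serialize external network-changed events against the spawn path.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        return rebuild_cache(instance_uuid)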
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1096.687275] env[61978]: DEBUG oslo_concurrency.lockutils [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.700437] env[61978]: DEBUG nova.network.neutron [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Updating instance_info_cache with network_info: [{"id": "e8b9a7d5-25ce-419f-b3e9-9179be86e340", "address": "fa:16:3e:6c:d3:d1", "network": {"id": "a4bfbe6c-0ede-463d-935f-52cc2f1e3692", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1615190107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3c3fe7c7f560427db0f814a2c67bb527", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5215e5b-294b-4e8c-bd06-355e9955ab1d", "external-id": "nsx-vlan-transportzone-529", "segmentation_id": 529, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8b9a7d5-25", "ovs_interfaceid": "e8b9a7d5-25ce-419f-b3e9-9179be86e340", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1096.713232] env[61978]: DEBUG oslo_concurrency.lockutils [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.009s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.716858] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.765s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.718680] env[61978]: INFO nova.compute.claims [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1096.751772] env[61978]: INFO nova.scheduler.client.report [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Deleted allocations for instance 7e6178cf-b7be-46f8-8f8c-8605a09703c7 [ 1096.866622] env[61978]: DEBUG oslo_vmware.rw_handles [None 
req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e2e1b3-ffa4-85ec-cbe0-b11414c2cb6b/disk-0.vmdk. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1096.867620] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a03ec60-f643-4019-bbd9-420fdf6cd674 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.879942] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52411713-5793-fdcd-b334-ed91d033d183, 'name': SearchDatastore_Task, 'duration_secs': 0.010193} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.880191] env[61978]: DEBUG oslo_vmware.rw_handles [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e2e1b3-ffa4-85ec-cbe0-b11414c2cb6b/disk-0.vmdk is in state: ready. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1096.880351] env[61978]: ERROR oslo_vmware.rw_handles [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e2e1b3-ffa4-85ec-cbe0-b11414c2cb6b/disk-0.vmdk due to incomplete transfer. 
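The ERROR record above is the cleanup half of the image-upload path: the HTTP NFC lease handed out for the VMDK transfer is still in the "ready" state, but the read handle is being closed before the transfer finished, so the lease has to be aborted rather than completed (hence the HttpNfcLease.HttpNfcLeaseAbort invocation that follows). A sketch of that decision, assuming oslo.vmware's vim_util.get_object_property helper and an already established session; lease_ref stands for the HttpNfcLease managed-object reference returned when the export began:

from oslo_vmware import vim_util


def release_nfc_lease(session, lease_ref, transfer_complete):
    # Lease states are: initializing, ready, done, error. Anything other
    # than "ready" means vCenter has already moved the lease on.
    state = session.invoke_api(vim_util, 'get_object_property',
                               session.vim, lease_ref, 'state')
    if state != 'ready':
        return
    if transfer_complete:
        # Normal path: every byte was transferred, so finalize the lease.
        session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease_ref)
    else:
        # Incomplete transfer, as logged above: abort so vCenter discards
        # the partially transferred disk.
        session.invoke_api(session.vim, 'HttpNfcLeaseAbort', lease_ref)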
[ 1096.880649] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1096.880884] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1096.881143] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.881296] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.881475] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1096.881699] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-54c9341c-cbd5-48e0-9653-5f394e9164f0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.883028] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4963c586-7d52-459c-b1c2-7a4ffca82dfd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.889759] env[61978]: DEBUG oslo_vmware.rw_handles [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e2e1b3-ffa4-85ec-cbe0-b11414c2cb6b/disk-0.vmdk. 
{{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1096.889995] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Uploaded image 2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a to the Glance image server {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1096.892136] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Destroying the VM {{(pid=61978) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1096.893014] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b5099209-67a8-46c1-8787-afaf7e3a062c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.894355] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1096.894531] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1096.895192] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36f20ebd-39cb-425c-824d-913ef3a47ce5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.900549] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1096.900549] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5295378a-ba2b-1bf4-1926-b11134cc1da8" [ 1096.900549] env[61978]: _type = "Task" [ 1096.900549] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.902027] env[61978]: DEBUG oslo_vmware.api [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1096.902027] env[61978]: value = "task-1395359" [ 1096.902027] env[61978]: _type = "Task" [ 1096.902027] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.913968] env[61978]: DEBUG oslo_vmware.api [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395359, 'name': Destroy_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.917013] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5295378a-ba2b-1bf4-1926-b11134cc1da8, 'name': SearchDatastore_Task, 'duration_secs': 0.009188} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.917795] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51fbd3c0-b7c4-47d1-bbb8-0c06ec0b28ee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.922516] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1096.922516] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52eedf50-c9d4-8d50-15bd-5af05b000443" [ 1096.922516] env[61978]: _type = "Task" [ 1096.922516] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.932498] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52eedf50-c9d4-8d50-15bd-5af05b000443, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.934705] env[61978]: DEBUG nova.compute.manager [req-ed628848-ff27-4b0f-a4eb-94d56c72bd97 req-2a87ed78-e730-436c-ad05-351adc62e8e1 service nova] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Received event network-vif-deleted-7c209a6b-d93f-45a5-b1df-3b7244e3a624 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1096.934873] env[61978]: DEBUG nova.compute.manager [req-ed628848-ff27-4b0f-a4eb-94d56c72bd97 req-2a87ed78-e730-436c-ad05-351adc62e8e1 service nova] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Received event network-vif-plugged-e8b9a7d5-25ce-419f-b3e9-9179be86e340 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1096.935087] env[61978]: DEBUG oslo_concurrency.lockutils [req-ed628848-ff27-4b0f-a4eb-94d56c72bd97 req-2a87ed78-e730-436c-ad05-351adc62e8e1 service nova] Acquiring lock "cf6d8815-ed87-4629-9df9-6f406ac2fe6e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.935304] env[61978]: DEBUG oslo_concurrency.lockutils [req-ed628848-ff27-4b0f-a4eb-94d56c72bd97 req-2a87ed78-e730-436c-ad05-351adc62e8e1 service nova] Lock "cf6d8815-ed87-4629-9df9-6f406ac2fe6e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.935493] env[61978]: DEBUG oslo_concurrency.lockutils [req-ed628848-ff27-4b0f-a4eb-94d56c72bd97 req-2a87ed78-e730-436c-ad05-351adc62e8e1 service nova] Lock 
"cf6d8815-ed87-4629-9df9-6f406ac2fe6e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.935727] env[61978]: DEBUG nova.compute.manager [req-ed628848-ff27-4b0f-a4eb-94d56c72bd97 req-2a87ed78-e730-436c-ad05-351adc62e8e1 service nova] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] No waiting events found dispatching network-vif-plugged-e8b9a7d5-25ce-419f-b3e9-9179be86e340 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1096.935883] env[61978]: WARNING nova.compute.manager [req-ed628848-ff27-4b0f-a4eb-94d56c72bd97 req-2a87ed78-e730-436c-ad05-351adc62e8e1 service nova] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Received unexpected event network-vif-plugged-e8b9a7d5-25ce-419f-b3e9-9179be86e340 for instance with vm_state building and task_state spawning. [ 1096.936065] env[61978]: DEBUG nova.compute.manager [req-ed628848-ff27-4b0f-a4eb-94d56c72bd97 req-2a87ed78-e730-436c-ad05-351adc62e8e1 service nova] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Received event network-changed-e8b9a7d5-25ce-419f-b3e9-9179be86e340 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1096.936240] env[61978]: DEBUG nova.compute.manager [req-ed628848-ff27-4b0f-a4eb-94d56c72bd97 req-2a87ed78-e730-436c-ad05-351adc62e8e1 service nova] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Refreshing instance network info cache due to event network-changed-e8b9a7d5-25ce-419f-b3e9-9179be86e340. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1096.936416] env[61978]: DEBUG oslo_concurrency.lockutils [req-ed628848-ff27-4b0f-a4eb-94d56c72bd97 req-2a87ed78-e730-436c-ad05-351adc62e8e1 service nova] Acquiring lock "refresh_cache-cf6d8815-ed87-4629-9df9-6f406ac2fe6e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1097.202687] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Releasing lock "refresh_cache-cf6d8815-ed87-4629-9df9-6f406ac2fe6e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1097.203568] env[61978]: DEBUG nova.compute.manager [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Instance network_info: |[{"id": "e8b9a7d5-25ce-419f-b3e9-9179be86e340", "address": "fa:16:3e:6c:d3:d1", "network": {"id": "a4bfbe6c-0ede-463d-935f-52cc2f1e3692", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1615190107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3c3fe7c7f560427db0f814a2c67bb527", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5215e5b-294b-4e8c-bd06-355e9955ab1d", "external-id": "nsx-vlan-transportzone-529", "segmentation_id": 529, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tape8b9a7d5-25", "ovs_interfaceid": "e8b9a7d5-25ce-419f-b3e9-9179be86e340", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1097.203568] env[61978]: DEBUG oslo_concurrency.lockutils [req-ed628848-ff27-4b0f-a4eb-94d56c72bd97 req-2a87ed78-e730-436c-ad05-351adc62e8e1 service nova] Acquired lock "refresh_cache-cf6d8815-ed87-4629-9df9-6f406ac2fe6e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.203568] env[61978]: DEBUG nova.network.neutron [req-ed628848-ff27-4b0f-a4eb-94d56c72bd97 req-2a87ed78-e730-436c-ad05-351adc62e8e1 service nova] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Refreshing network info cache for port e8b9a7d5-25ce-419f-b3e9-9179be86e340 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1097.204819] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:d3:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b5215e5b-294b-4e8c-bd06-355e9955ab1d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e8b9a7d5-25ce-419f-b3e9-9179be86e340', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1097.212916] env[61978]: DEBUG oslo.service.loopingcall [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1097.213952] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1097.214213] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-09bf16d4-4451-4952-b489-ecb71f82d528 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.237549] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1097.237549] env[61978]: value = "task-1395360" [ 1097.237549] env[61978]: _type = "Task" [ 1097.237549] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.247911] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395360, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.264442] env[61978]: DEBUG oslo_concurrency.lockutils [None req-89252d88-bf62-4b76-a0a4-80ae7fe173e0 tempest-MultipleCreateTestJSON-1679774182 tempest-MultipleCreateTestJSON-1679774182-project-member] Lock "7e6178cf-b7be-46f8-8f8c-8605a09703c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.403s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1097.414900] env[61978]: DEBUG oslo_vmware.api [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395359, 'name': Destroy_Task, 'duration_secs': 0.344805} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.415202] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Destroyed the VM [ 1097.415451] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Deleting Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1097.415860] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ff11ce6d-2e7f-46d0-bb87-bec163c95b87 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.422024] env[61978]: DEBUG oslo_vmware.api [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1097.422024] env[61978]: value = "task-1395361" [ 1097.422024] env[61978]: _type = "Task" [ 1097.422024] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.433582] env[61978]: DEBUG oslo_vmware.api [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395361, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.437518] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52eedf50-c9d4-8d50-15bd-5af05b000443, 'name': SearchDatastore_Task, 'duration_secs': 0.009033} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.437796] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1097.438081] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 17c56c1c-9992-4559-ad23-c68909ae6792/17c56c1c-9992-4559-ad23-c68909ae6792.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1097.438360] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-68c3cfc6-8c2e-4f4b-9bd9-a91833317e80 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.444888] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1097.444888] env[61978]: value = "task-1395362" [ 1097.444888] env[61978]: _type = "Task" [ 1097.444888] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.454608] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395362, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.749804] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395360, 'name': CreateVM_Task, 'duration_secs': 0.396771} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.750081] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1097.752459] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1097.752653] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.752992] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1097.753295] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60fcc244-e4f2-4c66-9421-b66c440482fb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.758544] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1097.758544] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]528c182a-db62-414a-2e6e-703f9d811c89" [ 1097.758544] env[61978]: _type = "Task" [ 1097.758544] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.775970] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528c182a-db62-414a-2e6e-703f9d811c89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.935217] env[61978]: DEBUG oslo_vmware.api [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395361, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.961278] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395362, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.097743] env[61978]: DEBUG nova.network.neutron [req-ed628848-ff27-4b0f-a4eb-94d56c72bd97 req-2a87ed78-e730-436c-ad05-351adc62e8e1 service nova] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Updated VIF entry in instance network info cache for port e8b9a7d5-25ce-419f-b3e9-9179be86e340. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1098.098151] env[61978]: DEBUG nova.network.neutron [req-ed628848-ff27-4b0f-a4eb-94d56c72bd97 req-2a87ed78-e730-436c-ad05-351adc62e8e1 service nova] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Updating instance_info_cache with network_info: [{"id": "e8b9a7d5-25ce-419f-b3e9-9179be86e340", "address": "fa:16:3e:6c:d3:d1", "network": {"id": "a4bfbe6c-0ede-463d-935f-52cc2f1e3692", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1615190107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3c3fe7c7f560427db0f814a2c67bb527", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5215e5b-294b-4e8c-bd06-355e9955ab1d", "external-id": "nsx-vlan-transportzone-529", "segmentation_id": 529, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8b9a7d5-25", "ovs_interfaceid": "e8b9a7d5-25ce-419f-b3e9-9179be86e340", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.167191] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b31c0cab-d349-4fc4-8344-37e223b186ca {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.181466] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a846b7b-b429-4fd7-ae50-624627521bb3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.210898] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc946228-30f7-4149-abf6-e38ece68ba94 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.217647] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8327f88f-bc2e-4aa8-936e-06e32b42a7ca {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.231210] env[61978]: DEBUG nova.compute.provider_tree [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1098.271748] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 
tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528c182a-db62-414a-2e6e-703f9d811c89, 'name': SearchDatastore_Task, 'duration_secs': 0.016301} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.271829] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1098.272072] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1098.272349] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1098.272535] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.272869] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1098.273325] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-573e2f63-c524-4352-bb55-74e67a2641e4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.285090] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1098.285209] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1098.285996] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18d0f9f4-053f-4134-bfe8-45e519152a65 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.291279] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1098.291279] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52903771-e55b-33d5-c464-314fecb135d3" [ 1098.291279] env[61978]: _type = "Task" [ 1098.291279] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.299204] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52903771-e55b-33d5-c464-314fecb135d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.434181] env[61978]: DEBUG oslo_vmware.api [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395361, 'name': RemoveSnapshot_Task, 'duration_secs': 0.677414} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.434538] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Deleted Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1098.434824] env[61978]: INFO nova.compute.manager [None req-9a2edc9d-3600-46a1-b447-b8163d9c0112 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Took 15.07 seconds to snapshot the instance on the hypervisor. [ 1098.458228] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395362, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.516932} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.458853] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 17c56c1c-9992-4559-ad23-c68909ae6792/17c56c1c-9992-4559-ad23-c68909ae6792.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1098.459102] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1098.459375] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b72d6d11-32f6-4e17-8c1c-88ea04704d7d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.466470] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1098.466470] env[61978]: value = "task-1395363" [ 1098.466470] env[61978]: _type = "Task" [ 1098.466470] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.474978] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395363, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.491367] env[61978]: DEBUG oslo_concurrency.lockutils [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "de0f46af-870a-4095-a417-913a2c51f66b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.491367] env[61978]: DEBUG oslo_concurrency.lockutils [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "de0f46af-870a-4095-a417-913a2c51f66b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.601231] env[61978]: DEBUG oslo_concurrency.lockutils [req-ed628848-ff27-4b0f-a4eb-94d56c72bd97 req-2a87ed78-e730-436c-ad05-351adc62e8e1 service nova] Releasing lock "refresh_cache-cf6d8815-ed87-4629-9df9-6f406ac2fe6e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1098.734776] env[61978]: DEBUG nova.scheduler.client.report [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1098.802180] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52903771-e55b-33d5-c464-314fecb135d3, 'name': SearchDatastore_Task, 'duration_secs': 0.011468} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.803051] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d772d24-df36-43bd-885a-edfc0735b196 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.809178] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1098.809178] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5251e77c-cdf9-c9a6-c286-a51906bb27eb" [ 1098.809178] env[61978]: _type = "Task" [ 1098.809178] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.817524] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5251e77c-cdf9-c9a6-c286-a51906bb27eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.977281] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395363, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069081} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.978052] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1098.979281] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c5f600-f0a4-4892-b0d7-28ad5b592cb0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.004818] env[61978]: DEBUG nova.compute.manager [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1099.012416] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 17c56c1c-9992-4559-ad23-c68909ae6792/17c56c1c-9992-4559-ad23-c68909ae6792.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1099.014027] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e76894a-db2a-4e67-a7f7-b7e74cc49245 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.035651] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1099.035651] env[61978]: value = "task-1395364" [ 1099.035651] env[61978]: _type = "Task" [ 1099.035651] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.045409] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395364, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.241062] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.525s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.242029] env[61978]: DEBUG nova.compute.manager [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1099.244850] env[61978]: DEBUG oslo_concurrency.lockutils [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.427s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.245185] env[61978]: DEBUG nova.objects.instance [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lazy-loading 'resources' on Instance uuid a0ea73d1-a613-4403-8527-a8b81a619adf {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1099.320624] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5251e77c-cdf9-c9a6-c286-a51906bb27eb, 'name': SearchDatastore_Task, 'duration_secs': 0.009986} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.320624] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1099.320624] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] cf6d8815-ed87-4629-9df9-6f406ac2fe6e/cf6d8815-ed87-4629-9df9-6f406ac2fe6e.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1099.320824] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd4a8b06-5142-4dd5-b147-7a240ae0e0b3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.327870] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1099.327870] env[61978]: value = "task-1395365" [ 1099.327870] env[61978]: _type = "Task" [ 1099.327870] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.335453] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395365, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.533542] env[61978]: DEBUG oslo_concurrency.lockutils [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.547911] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395364, 'name': ReconfigVM_Task, 'duration_secs': 0.276639} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.547911] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 17c56c1c-9992-4559-ad23-c68909ae6792/17c56c1c-9992-4559-ad23-c68909ae6792.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1099.548577] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-841ae18c-5982-4871-a8d6-6b00d7ae8c15 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.557939] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1099.557939] env[61978]: value = "task-1395366" [ 1099.557939] env[61978]: _type = "Task" [ 1099.557939] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.570808] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395366, 'name': Rename_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.750110] env[61978]: DEBUG nova.compute.utils [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1099.754655] env[61978]: DEBUG nova.compute.manager [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1099.754845] env[61978]: DEBUG nova.network.neutron [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1099.805867] env[61978]: DEBUG nova.policy [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a2ebd638c24f4a5d959ad19796744b37', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '26cb7552530047c5867347d62195121e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1099.839627] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395365, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.069860] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395366, 'name': Rename_Task, 'duration_secs': 0.166722} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.069860] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1100.070017] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d79f3366-f88b-4e89-9d29-0378e80c20ec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.079537] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1100.079537] env[61978]: value = "task-1395367" [ 1100.079537] env[61978]: _type = "Task" [ 1100.079537] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.092693] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395367, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.093471] env[61978]: DEBUG nova.network.neutron [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Successfully created port: 4f64e24d-64d3-4410-8f23-96ae24053c6c {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1100.109948] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5440741e-f041-4a47-9525-271520035834 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.117874] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64167a0e-529c-473e-b8b8-e5804e290ae0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.152789] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0fdc16a-e526-437d-a302-a44865f04724 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.160103] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab13bbe-f630-47f0-837d-f308dcf916c8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.165130] env[61978]: DEBUG oslo_concurrency.lockutils [None req-54b5979d-de27-45a5-be75-fde0e5f8e0e9 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.165425] env[61978]: DEBUG oslo_concurrency.lockutils [None req-54b5979d-de27-45a5-be75-fde0e5f8e0e9 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.165599] env[61978]: DEBUG nova.compute.manager [None req-54b5979d-de27-45a5-be75-fde0e5f8e0e9 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1100.166384] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48b8d1c-a72b-4172-8b60-6c4b66162e6a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.178906] env[61978]: DEBUG nova.compute.provider_tree [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.181788] env[61978]: DEBUG nova.compute.manager [None 
req-54b5979d-de27-45a5-be75-fde0e5f8e0e9 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61978) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1100.182329] env[61978]: DEBUG nova.objects.instance [None req-54b5979d-de27-45a5-be75-fde0e5f8e0e9 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lazy-loading 'flavor' on Instance uuid 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1100.255442] env[61978]: DEBUG nova.compute.manager [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1100.341471] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395365, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537224} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.341796] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] cf6d8815-ed87-4629-9df9-6f406ac2fe6e/cf6d8815-ed87-4629-9df9-6f406ac2fe6e.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1100.342073] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1100.342560] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dc252700-5333-43a3-8faa-fcc5d5b2422f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.349046] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1100.349046] env[61978]: value = "task-1395368" [ 1100.349046] env[61978]: _type = "Task" [ 1100.349046] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.357669] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395368, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.365054] env[61978]: DEBUG nova.network.neutron [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Successfully created port: 35ad6d98-9443-41d0-8356-d62f53f931fb {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1100.590388] env[61978]: DEBUG oslo_vmware.api [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395367, 'name': PowerOnVM_Task, 'duration_secs': 0.485708} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.590779] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1100.591140] env[61978]: INFO nova.compute.manager [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Took 8.46 seconds to spawn the instance on the hypervisor. [ 1100.591487] env[61978]: DEBUG nova.compute.manager [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1100.594019] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b4c4375-60a1-4a69-aee8-68e67a6a08b9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.685639] env[61978]: DEBUG nova.scheduler.client.report [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1100.691488] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-54b5979d-de27-45a5-be75-fde0e5f8e0e9 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1100.692086] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3ad7a176-fd88-4aa2-bef2-c13f10054921 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
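The recurring pattern in the entries above and below is oslo.vmware's invoke-then-poll cycle: a vSphere *_Task method is invoked through the API session, and wait_for_task/_poll_task then poll the returned task reference until it reports success (the "progress is N%" lines). A minimal, illustrative sketch of that cycle follows; it assumes only oslo.vmware's public VMwareAPISession API (invoke_api, wait_for_task), and the function name and vm_ref argument are placeholders rather than Nova's own code.

from oslo_vmware import api as vmware_api

def power_on(session: vmware_api.VMwareAPISession, vm_ref):
    # Invoking a *_Task method returns a Task managed-object reference
    # immediately (e.g. the PowerOnVM_Task entries logged above).
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task polls the task (the "_poll_task ... progress is N%"
    # entries) and returns its TaskInfo once the task succeeds, raising
    # if the task ends in error or is cancelled.
    return session.wait_for_task(task_ref)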
[ 1100.700264] env[61978]: DEBUG oslo_vmware.api [None req-54b5979d-de27-45a5-be75-fde0e5f8e0e9 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1100.700264] env[61978]: value = "task-1395369" [ 1100.700264] env[61978]: _type = "Task" [ 1100.700264] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.710628] env[61978]: DEBUG oslo_vmware.api [None req-54b5979d-de27-45a5-be75-fde0e5f8e0e9 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395369, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.859958] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395368, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066197} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.860681] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1100.861600] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54fe645d-9aa1-4536-bdb2-93dd250c9f77 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.887093] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] cf6d8815-ed87-4629-9df9-6f406ac2fe6e/cf6d8815-ed87-4629-9df9-6f406ac2fe6e.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1100.887441] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c5c211f-9a52-4227-acd1-e65a2ffed5b9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.909315] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1100.909315] env[61978]: value = "task-1395370" [ 1100.909315] env[61978]: _type = "Task" [ 1100.909315] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.917512] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395370, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.109093] env[61978]: INFO nova.compute.manager [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Took 37.00 seconds to build instance. [ 1101.192419] env[61978]: DEBUG oslo_concurrency.lockutils [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.947s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.195405] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.521s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1101.195647] env[61978]: DEBUG nova.objects.instance [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61978) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1101.209939] env[61978]: DEBUG oslo_vmware.api [None req-54b5979d-de27-45a5-be75-fde0e5f8e0e9 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395369, 'name': PowerOffVM_Task, 'duration_secs': 0.210094} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.210231] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-54b5979d-de27-45a5-be75-fde0e5f8e0e9 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1101.210421] env[61978]: DEBUG nova.compute.manager [None req-54b5979d-de27-45a5-be75-fde0e5f8e0e9 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1101.211299] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd58874c-d484-4f39-99e6-ca9982333dd9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.216189] env[61978]: INFO nova.scheduler.client.report [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Deleted allocations for instance a0ea73d1-a613-4403-8527-a8b81a619adf [ 1101.264193] env[61978]: DEBUG nova.compute.manager [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1101.293179] env[61978]: DEBUG nova.virt.hardware [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1101.293438] env[61978]: DEBUG nova.virt.hardware [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1101.293604] env[61978]: DEBUG nova.virt.hardware [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1101.293792] env[61978]: DEBUG nova.virt.hardware [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 
tempest-ServersTestMultiNic-747981156-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1101.293955] env[61978]: DEBUG nova.virt.hardware [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1101.294125] env[61978]: DEBUG nova.virt.hardware [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1101.294356] env[61978]: DEBUG nova.virt.hardware [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1101.294521] env[61978]: DEBUG nova.virt.hardware [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1101.294698] env[61978]: DEBUG nova.virt.hardware [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1101.294934] env[61978]: DEBUG nova.virt.hardware [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1101.295055] env[61978]: DEBUG nova.virt.hardware [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1101.296341] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f5fe95-c2f8-4835-a039-64dff74ab44f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.304684] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a76008dc-9319-4707-bd51-65c97e643d63 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.422788] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395370, 'name': ReconfigVM_Task, 'duration_secs': 0.313297} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.423271] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Reconfigured VM instance instance-00000044 to attach disk [datastore2] cf6d8815-ed87-4629-9df9-6f406ac2fe6e/cf6d8815-ed87-4629-9df9-6f406ac2fe6e.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1101.424390] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2a531e74-724f-4f28-9ccd-2521258f3bc5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.431826] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1101.431826] env[61978]: value = "task-1395371" [ 1101.431826] env[61978]: _type = "Task" [ 1101.431826] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.441881] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395371, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.611683] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5b57e0e-3572-4c97-b858-78d000e09b4a tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "17c56c1c-9992-4559-ad23-c68909ae6792" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.513s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.727814] env[61978]: DEBUG oslo_concurrency.lockutils [None req-163f59d2-06c7-4784-8188-fa6e427b29f7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "a0ea73d1-a613-4403-8527-a8b81a619adf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.289s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.729160] env[61978]: DEBUG oslo_concurrency.lockutils [None req-54b5979d-de27-45a5-be75-fde0e5f8e0e9 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.564s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.864201] env[61978]: DEBUG nova.network.neutron [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Successfully updated port: 4f64e24d-64d3-4410-8f23-96ae24053c6c {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1101.942915] env[61978]: DEBUG oslo_vmware.api [None 
req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395371, 'name': Rename_Task, 'duration_secs': 0.14928} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.943382] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1101.944142] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5c79f50-167f-4b08-9574-b568de7ef96a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.950790] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1101.950790] env[61978]: value = "task-1395372" [ 1101.950790] env[61978]: _type = "Task" [ 1101.950790] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.961560] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395372, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.149632] env[61978]: DEBUG nova.compute.manager [req-2a5f687a-2cf3-46bc-a9b1-d684a92bc03c req-4f24f7bf-3246-4fcc-a578-ee492672024a service nova] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Received event network-vif-plugged-4f64e24d-64d3-4410-8f23-96ae24053c6c {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1102.151023] env[61978]: DEBUG oslo_concurrency.lockutils [req-2a5f687a-2cf3-46bc-a9b1-d684a92bc03c req-4f24f7bf-3246-4fcc-a578-ee492672024a service nova] Acquiring lock "c861eaa2-1c57-476f-92b3-886c8e44f6b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1102.151023] env[61978]: DEBUG oslo_concurrency.lockutils [req-2a5f687a-2cf3-46bc-a9b1-d684a92bc03c req-4f24f7bf-3246-4fcc-a578-ee492672024a service nova] Lock "c861eaa2-1c57-476f-92b3-886c8e44f6b4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.151023] env[61978]: DEBUG oslo_concurrency.lockutils [req-2a5f687a-2cf3-46bc-a9b1-d684a92bc03c req-4f24f7bf-3246-4fcc-a578-ee492672024a service nova] Lock "c861eaa2-1c57-476f-92b3-886c8e44f6b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.151023] env[61978]: DEBUG nova.compute.manager [req-2a5f687a-2cf3-46bc-a9b1-d684a92bc03c req-4f24f7bf-3246-4fcc-a578-ee492672024a service nova] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] No 
waiting events found dispatching network-vif-plugged-4f64e24d-64d3-4410-8f23-96ae24053c6c {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1102.151023] env[61978]: WARNING nova.compute.manager [req-2a5f687a-2cf3-46bc-a9b1-d684a92bc03c req-4f24f7bf-3246-4fcc-a578-ee492672024a service nova] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Received unexpected event network-vif-plugged-4f64e24d-64d3-4410-8f23-96ae24053c6c for instance with vm_state building and task_state spawning. [ 1102.206958] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c85bb86-b38a-4e9b-b131-bcfe915831be tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.206958] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.257s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.206958] env[61978]: DEBUG nova.objects.instance [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Lazy-loading 'resources' on Instance uuid eb7cb200-c162-4e92-8916-6d9abd5cf34d {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1102.461841] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395372, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.743076] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1102.743076] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.904399] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "35a6d3ec-8688-43c2-93c4-b23033aaf280" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1102.904635] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "35a6d3ec-8688-43c2-93c4-b23033aaf280" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.967274] env[61978]: DEBUG oslo_vmware.api [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395372, 'name': PowerOnVM_Task, 'duration_secs': 0.712452} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.967545] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1102.967751] env[61978]: INFO nova.compute.manager [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Took 8.30 seconds to spawn the instance on the hypervisor. 
[ 1102.967969] env[61978]: DEBUG nova.compute.manager [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1102.968750] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594cd98f-27e0-4863-ba50-623bffc402a0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.017683] env[61978]: DEBUG nova.objects.instance [None req-c8525755-e958-4f23-9985-154905407e0b tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lazy-loading 'flavor' on Instance uuid 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1103.080185] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff1665f-fffc-4861-b69c-af82b9d426d0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.087906] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-544ebb2e-9b4a-404a-bd6d-25ddb7d42b13 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.118540] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5fd46af-b20b-4696-92d7-dd0f290e6ce3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.126737] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed6b5d8-0180-44b5-ba99-4b67f4802ab1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.140197] env[61978]: DEBUG nova.compute.provider_tree [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1103.245871] env[61978]: DEBUG nova.compute.manager [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1103.409844] env[61978]: DEBUG nova.compute.manager [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1103.486703] env[61978]: INFO nova.compute.manager [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Took 38.55 seconds to build instance. 
[ 1103.528043] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8525755-e958-4f23-9985-154905407e0b tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1103.528043] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8525755-e958-4f23-9985-154905407e0b tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.528258] env[61978]: DEBUG nova.network.neutron [None req-c8525755-e958-4f23-9985-154905407e0b tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1103.528415] env[61978]: DEBUG nova.objects.instance [None req-c8525755-e958-4f23-9985-154905407e0b tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lazy-loading 'info_cache' on Instance uuid 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1103.643879] env[61978]: DEBUG nova.scheduler.client.report [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1103.771038] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.923428] env[61978]: DEBUG nova.network.neutron [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Successfully updated port: 35ad6d98-9443-41d0-8356-d62f53f931fb {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1103.930974] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.988080] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d83037d3-7edd-4bc9-9b60-4f75e6071594 
tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lock "cf6d8815-ed87-4629-9df9-6f406ac2fe6e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.064s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1104.032811] env[61978]: DEBUG nova.objects.base [None req-c8525755-e958-4f23-9985-154905407e0b tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Object Instance<59f32dd0-1faa-4059-9ef3-b177e8f4fa4c> lazy-loaded attributes: flavor,info_cache {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1104.117323] env[61978]: INFO nova.compute.manager [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Rescuing [ 1104.117606] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquiring lock "refresh_cache-cf6d8815-ed87-4629-9df9-6f406ac2fe6e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1104.117801] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquired lock "refresh_cache-cf6d8815-ed87-4629-9df9-6f406ac2fe6e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.117985] env[61978]: DEBUG nova.network.neutron [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1104.149240] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.943s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1104.151514] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 19.772s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.172854] env[61978]: INFO nova.scheduler.client.report [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Deleted allocations for instance eb7cb200-c162-4e92-8916-6d9abd5cf34d [ 1104.179269] env[61978]: DEBUG nova.compute.manager [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Received event network-changed-4f64e24d-64d3-4410-8f23-96ae24053c6c {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1104.179462] env[61978]: 
DEBUG nova.compute.manager [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Refreshing instance network info cache due to event network-changed-4f64e24d-64d3-4410-8f23-96ae24053c6c. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1104.179689] env[61978]: DEBUG oslo_concurrency.lockutils [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] Acquiring lock "refresh_cache-c861eaa2-1c57-476f-92b3-886c8e44f6b4" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1104.179843] env[61978]: DEBUG oslo_concurrency.lockutils [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] Acquired lock "refresh_cache-c861eaa2-1c57-476f-92b3-886c8e44f6b4" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.180020] env[61978]: DEBUG nova.network.neutron [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Refreshing network info cache for port 4f64e24d-64d3-4410-8f23-96ae24053c6c {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1104.425726] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquiring lock "refresh_cache-c861eaa2-1c57-476f-92b3-886c8e44f6b4" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1104.689149] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25bd5adb-13af-4d2d-b042-ccd37e54f7ea tempest-VolumesAssistedSnapshotsTest-924664751 tempest-VolumesAssistedSnapshotsTest-924664751-project-member] Lock "eb7cb200-c162-4e92-8916-6d9abd5cf34d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.176s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1104.750083] env[61978]: DEBUG nova.network.neutron [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1104.874327] env[61978]: DEBUG nova.network.neutron [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.886052] env[61978]: DEBUG nova.network.neutron [None req-c8525755-e958-4f23-9985-154905407e0b tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Updating instance_info_cache with network_info: [{"id": "7417d7e9-723d-408d-bfa4-e583af757e79", "address": "fa:16:3e:e6:e3:c6", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7417d7e9-72", "ovs_interfaceid": "7417d7e9-723d-408d-bfa4-e583af757e79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.909347] env[61978]: DEBUG nova.network.neutron [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Updating instance_info_cache with network_info: [{"id": "e8b9a7d5-25ce-419f-b3e9-9179be86e340", "address": "fa:16:3e:6c:d3:d1", "network": {"id": "a4bfbe6c-0ede-463d-935f-52cc2f1e3692", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1615190107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3c3fe7c7f560427db0f814a2c67bb527", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5215e5b-294b-4e8c-bd06-355e9955ab1d", "external-id": "nsx-vlan-transportzone-529", "segmentation_id": 529, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8b9a7d5-25", "ovs_interfaceid": "e8b9a7d5-25ce-419f-b3e9-9179be86e340", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.169694] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Applying migration context for instance c17c986e-c008-4414-8dd1-4ea836458048 as it has an incoming, in-progress migration dfe6158b-18dd-4d7f-8f9a-1d1b35f9479f. Migration status is confirming {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1105.171521] env[61978]: INFO nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Updating resource usage from migration dfe6158b-18dd-4d7f-8f9a-1d1b35f9479f [ 1105.195222] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 92eb5edb-803b-48d4-8c4f-338d7c3b3d13 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1105.195400] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 243e7146-46fc-43f4-a83b-cdc58f397f9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1105.195588] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 96bef3f3-a45c-43ba-a86a-66c1d5686ea6 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1105.195748] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance b26a4784-698d-477a-8db7-58156899d231 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1105.195877] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance f3c837fb-be7e-40a6-aae4-7f213c62ab2c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1105.196015] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1105.196142] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1105.196268] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance bdfdd685-e440-4f53-b6c4-2ee2f06acba8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1105.196393] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance f1001633-e4e5-4de1-8a6b-cf653e43d821 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1105.196534] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1105.196669] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance b356fc81-f857-4416-8eb0-28c66d137967 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1105.196786] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 4c7053ee-7c44-49ee-8d30-bf14686c6b1c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1105.196899] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 9ee04ee8-98ec-4be9-935d-cad7cd176466 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1105.197018] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 0cdff646-34ad-49d5-b775-28e8e7ce778e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1105.197136] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance b932d221-aca9-4853-aa9c-2d27981e878c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1105.197271] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance f4034944-3a9d-4e14-a545-0bf574465e0b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1105.197394] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance a1087abd-28d1-40ac-96ab-dc38392d027c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1105.197507] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Migration dfe6158b-18dd-4d7f-8f9a-1d1b35f9479f is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1105.197617] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance c17c986e-c008-4414-8dd1-4ea836458048 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1105.197844] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance fdd0c16d-b0f8-4f81-9069-34d11f273acb is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1105.197964] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 17c56c1c-9992-4559-ad23-c68909ae6792 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1105.198092] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance cf6d8815-ed87-4629-9df9-6f406ac2fe6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1105.198231] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance c861eaa2-1c57-476f-92b3-886c8e44f6b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1105.376868] env[61978]: DEBUG oslo_concurrency.lockutils [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] Releasing lock "refresh_cache-c861eaa2-1c57-476f-92b3-886c8e44f6b4" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1105.377171] env[61978]: DEBUG nova.compute.manager [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Received event network-vif-plugged-35ad6d98-9443-41d0-8356-d62f53f931fb {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1105.377378] env[61978]: DEBUG oslo_concurrency.lockutils [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] Acquiring lock "c861eaa2-1c57-476f-92b3-886c8e44f6b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.377633] env[61978]: DEBUG oslo_concurrency.lockutils [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] Lock "c861eaa2-1c57-476f-92b3-886c8e44f6b4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.377836] env[61978]: DEBUG oslo_concurrency.lockutils [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] Lock "c861eaa2-1c57-476f-92b3-886c8e44f6b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.378029] env[61978]: DEBUG nova.compute.manager [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] No waiting events found dispatching network-vif-plugged-35ad6d98-9443-41d0-8356-d62f53f931fb {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1105.378213] env[61978]: WARNING nova.compute.manager [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Received unexpected event network-vif-plugged-35ad6d98-9443-41d0-8356-d62f53f931fb for instance with vm_state building and task_state spawning. [ 1105.378385] env[61978]: DEBUG nova.compute.manager [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Received event network-changed-35ad6d98-9443-41d0-8356-d62f53f931fb {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1105.378547] env[61978]: DEBUG nova.compute.manager [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Refreshing instance network info cache due to event network-changed-35ad6d98-9443-41d0-8356-d62f53f931fb. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1105.378774] env[61978]: DEBUG oslo_concurrency.lockutils [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] Acquiring lock "refresh_cache-c861eaa2-1c57-476f-92b3-886c8e44f6b4" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1105.378957] env[61978]: DEBUG oslo_concurrency.lockutils [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] Acquired lock "refresh_cache-c861eaa2-1c57-476f-92b3-886c8e44f6b4" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.379138] env[61978]: DEBUG nova.network.neutron [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Refreshing network info cache for port 35ad6d98-9443-41d0-8356-d62f53f931fb {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1105.387916] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8525755-e958-4f23-9985-154905407e0b tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1105.412019] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Releasing lock "refresh_cache-cf6d8815-ed87-4629-9df9-6f406ac2fe6e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1105.701072] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance ae6b92bb-6f79-4b52-bdb7-095985bf2fad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1105.891311] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8525755-e958-4f23-9985-154905407e0b tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1105.891603] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-db13d59a-a20c-47c7-bc62-b0d9198c32af {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.899209] env[61978]: DEBUG oslo_vmware.api [None req-c8525755-e958-4f23-9985-154905407e0b tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1105.899209] env[61978]: value = "task-1395373" [ 1105.899209] env[61978]: _type = "Task" [ 1105.899209] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.906479] env[61978]: DEBUG oslo_vmware.api [None req-c8525755-e958-4f23-9985-154905407e0b tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395373, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.914203] env[61978]: DEBUG nova.network.neutron [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1105.944558] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1105.944820] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-95c02118-9e2e-4da3-91d5-d4b7f29c6515 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.951984] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1105.951984] env[61978]: value = "task-1395374" [ 1105.951984] env[61978]: _type = "Task" [ 1105.951984] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.960447] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395374, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.002505] env[61978]: DEBUG nova.network.neutron [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.206704] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance de0f46af-870a-4095-a417-913a2c51f66b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.409605] env[61978]: DEBUG oslo_vmware.api [None req-c8525755-e958-4f23-9985-154905407e0b tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395373, 'name': PowerOnVM_Task, 'duration_secs': 0.425679} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.409886] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8525755-e958-4f23-9985-154905407e0b tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1106.410098] env[61978]: DEBUG nova.compute.manager [None req-c8525755-e958-4f23-9985-154905407e0b tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1106.410844] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b97780-3c1a-4d74-9175-d4b45cc434dd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.462989] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395374, 'name': PowerOffVM_Task, 'duration_secs': 0.185586} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.463586] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1106.464105] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4614ae20-5463-4b2d-a275-cc7ac0450325 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.483587] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd37e75d-2a3c-4ed4-a039-1cdfd3664070 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.506655] env[61978]: DEBUG oslo_concurrency.lockutils [req-7d7db17a-e767-440e-8826-6ac1e3b627c0 req-0001ec0d-5712-4cba-9986-6aa49c6786d6 service nova] Releasing lock "refresh_cache-c861eaa2-1c57-476f-92b3-886c8e44f6b4" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1106.507166] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquired lock "refresh_cache-c861eaa2-1c57-476f-92b3-886c8e44f6b4" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.507236] env[61978]: DEBUG nova.network.neutron [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1106.517223] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 
tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1106.517496] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-435f148a-0737-4511-b732-f8e1355aa179 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.527475] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1106.527475] env[61978]: value = "task-1395375" [ 1106.527475] env[61978]: _type = "Task" [ 1106.527475] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.539017] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] VM already powered off {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1106.539017] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1106.539017] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1106.539017] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.539017] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1106.539017] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-463c97f4-f3dd-4b40-9167-8c2eef39e7c8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.549629] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1106.549629] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1106.550044] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4da1bce-7209-4c05-be3e-fd0e613cdb03 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.558186] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1106.558186] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c6c12c-3414-0569-0fe2-213c447d4f3b" [ 1106.558186] env[61978]: _type = "Task" [ 1106.558186] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.565360] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c6c12c-3414-0569-0fe2-213c447d4f3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.713038] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1107.069023] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c6c12c-3414-0569-0fe2-213c447d4f3b, 'name': SearchDatastore_Task, 'duration_secs': 0.034783} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.069825] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b1ee9ce-febd-4600-b808-065b960a358b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.075683] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1107.075683] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cb47c7-bdde-991f-f702-a7ad561b8aca" [ 1107.075683] env[61978]: _type = "Task" [ 1107.075683] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.084247] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cb47c7-bdde-991f-f702-a7ad561b8aca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.101302] env[61978]: DEBUG nova.network.neutron [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1107.217308] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 35a6d3ec-8688-43c2-93c4-b23033aaf280 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1107.217308] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1107.217308] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3456MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=15 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1107.561086] env[61978]: DEBUG nova.network.neutron [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Updating instance_info_cache with network_info: [{"id": "4f64e24d-64d3-4410-8f23-96ae24053c6c", "address": "fa:16:3e:a2:2a:5c", "network": {"id": "50659875-d923-4ea6-9b51-86e6682a8a7e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2117211927", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.187", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26cb7552530047c5867347d62195121e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f64e24d-64", "ovs_interfaceid": "4f64e24d-64d3-4410-8f23-96ae24053c6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": 
"35ad6d98-9443-41d0-8356-d62f53f931fb", "address": "fa:16:3e:34:2f:ed", "network": {"id": "7ea59c49-3f2d-442c-9d6a-e0bccc3a5062", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-364359999", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.210", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "26cb7552530047c5867347d62195121e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35ad6d98-94", "ovs_interfaceid": "35ad6d98-9443-41d0-8356-d62f53f931fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.587221] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cb47c7-bdde-991f-f702-a7ad561b8aca, 'name': SearchDatastore_Task, 'duration_secs': 0.02283} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.590451] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1107.590727] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] cf6d8815-ed87-4629-9df9-6f406ac2fe6e/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk. {{(pid=61978) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1107.591516] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a6deac6-2058-4352-9666-e66f97e89e32 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.599196] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1107.599196] env[61978]: value = "task-1395376" [ 1107.599196] env[61978]: _type = "Task" [ 1107.599196] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.609830] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395376, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.642917] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-364c62b4-3346-47e9-bfff-0153d334dcff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.651911] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a5f725-9238-4387-afec-2cb6284e4253 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.696654] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd45289-2974-4a72-9a65-0ccd020a766e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.701953] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3beaecc-109f-4fd3-a2b7-8622592eb157 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.721121] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1108.064323] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Releasing lock "refresh_cache-c861eaa2-1c57-476f-92b3-886c8e44f6b4" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1108.064789] env[61978]: DEBUG nova.compute.manager [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Instance network_info: |[{"id": "4f64e24d-64d3-4410-8f23-96ae24053c6c", "address": "fa:16:3e:a2:2a:5c", "network": {"id": "50659875-d923-4ea6-9b51-86e6682a8a7e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2117211927", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.187", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26cb7552530047c5867347d62195121e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f64e24d-64", "ovs_interfaceid": "4f64e24d-64d3-4410-8f23-96ae24053c6c", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "35ad6d98-9443-41d0-8356-d62f53f931fb", "address": "fa:16:3e:34:2f:ed", "network": {"id": "7ea59c49-3f2d-442c-9d6a-e0bccc3a5062", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-364359999", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.210", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "26cb7552530047c5867347d62195121e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35ad6d98-94", "ovs_interfaceid": "35ad6d98-9443-41d0-8356-d62f53f931fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1108.065345] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:2a:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '669e4919-e0ad-4e23-9f23-4c5f2be0d858', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4f64e24d-64d3-4410-8f23-96ae24053c6c', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:2f:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'de7fa486-5f28-44ae-b0cf-72234ff87546', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '35ad6d98-9443-41d0-8356-d62f53f931fb', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1108.078264] env[61978]: DEBUG oslo.service.loopingcall [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1108.078696] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1108.079018] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78d45253-60f5-4273-b347-126d6b9ed2ce {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.113631] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395376, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499595} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.115135] env[61978]: INFO nova.virt.vmwareapi.ds_util [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] cf6d8815-ed87-4629-9df9-6f406ac2fe6e/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk. [ 1108.115438] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1108.115438] env[61978]: value = "task-1395377" [ 1108.115438] env[61978]: _type = "Task" [ 1108.115438] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.116219] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a967ac28-e209-47cd-a62c-ef2b4d3a45e9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.152381] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] cf6d8815-ed87-4629-9df9-6f406ac2fe6e/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1108.155821] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71e5e7be-04dd-4f2f-8211-e39fe76c5539 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.169874] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395377, 'name': CreateVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.177396] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1108.177396] env[61978]: value = "task-1395378" [ 1108.177396] env[61978]: _type = "Task" [ 1108.177396] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.189852] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395378, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.224047] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1108.629261] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395377, 'name': CreateVM_Task, 'duration_secs': 0.392911} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.629336] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1108.630135] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1108.630302] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.630626] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1108.630888] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3649a52e-36ae-47f2-a241-9fe4bda42c0a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.636300] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for the task: (returnval){ [ 1108.636300] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5232f069-b9a0-993e-77dc-ce400fa84b18" [ 1108.636300] env[61978]: _type = "Task" [ 1108.636300] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.645442] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5232f069-b9a0-993e-77dc-ce400fa84b18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.686647] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395378, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.733146] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1108.733400] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.582s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1108.733708] env[61978]: DEBUG oslo_concurrency.lockutils [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.641s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1108.733917] env[61978]: DEBUG oslo_concurrency.lockutils [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1108.736147] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.047s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1108.736513] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1108.738579] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.301s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1108.738792] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1108.740536] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.506s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1108.742420] env[61978]: INFO nova.compute.claims [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1108.745500] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.745674] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Cleaning up deleted instances {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1108.782733] env[61978]: INFO nova.scheduler.client.report [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Deleted allocations for instance 92eb5edb-803b-48d4-8c4f-338d7c3b3d13 [ 1108.786305] env[61978]: INFO nova.scheduler.client.report [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Deleted allocations for instance 96bef3f3-a45c-43ba-a86a-66c1d5686ea6 [ 1108.810515] env[61978]: INFO nova.scheduler.client.report [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Deleted allocations for instance f4034944-3a9d-4e14-a545-0bf574465e0b [ 1109.150277] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5232f069-b9a0-993e-77dc-ce400fa84b18, 'name': SearchDatastore_Task, 'duration_secs': 0.054358} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.150277] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1109.150277] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1109.150507] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1109.150691] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.150960] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1109.151176] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5df19411-7aa1-4af2-a7fa-f290f15ab218 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.162087] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1109.162350] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1109.163216] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4cc19a2-0e85-4ef8-9fd9-013b834cd836 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.169993] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for the task: (returnval){ [ 1109.169993] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]521bc40a-bd5f-4d57-e931-03c0476607f2" [ 1109.169993] env[61978]: _type = "Task" [ 1109.169993] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.179135] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]521bc40a-bd5f-4d57-e931-03c0476607f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.187546] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395378, 'name': ReconfigVM_Task, 'duration_secs': 0.662913} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.187834] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Reconfigured VM instance instance-00000044 to attach disk [datastore2] cf6d8815-ed87-4629-9df9-6f406ac2fe6e/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1109.188709] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fdd49d6-e8dd-44ba-aeaa-d3c04c11588b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.221339] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ccf3aaab-6138-4cf8-9f24-9e64a4d046eb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.239976] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1109.239976] env[61978]: value = "task-1395379" [ 1109.239976] env[61978]: _type = "Task" [ 1109.239976] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.270161] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] There are 44 instances to clean {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 1109.270510] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: a0ea73d1-a613-4403-8527-a8b81a619adf] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1109.274448] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395379, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.295576] env[61978]: DEBUG oslo_concurrency.lockutils [None req-615af7c3-603c-4307-acec-b1a91c79cd12 tempest-VolumesAdminNegativeTest-314994351 tempest-VolumesAdminNegativeTest-314994351-project-member] Lock "96bef3f3-a45c-43ba-a86a-66c1d5686ea6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.357s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.296800] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ced53ad2-e6f6-4165-98af-9c3e71b2dfe8 tempest-ServersAdminTestJSON-1999249038 tempest-ServersAdminTestJSON-1999249038-project-member] Lock "92eb5edb-803b-48d4-8c4f-338d7c3b3d13" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.653s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.318531] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d7b007ca-efae-4c30-bb3e-52e370a58980 tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "f4034944-3a9d-4e14-a545-0bf574465e0b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.594s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.659372] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce98820c-6d47-4990-a0ea-197c0cd3c161 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.668970] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd532bae-f0dd-4286-a6b5-d5868b30e620 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.687022] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]521bc40a-bd5f-4d57-e931-03c0476607f2, 'name': SearchDatastore_Task, 'duration_secs': 0.065965} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.713355] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41ca9ae1-4125-4527-a5ca-994852b35156 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.716657] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a645dcf8-e37b-4637-aa1f-9cd688ec4483 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.725039] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for the task: (returnval){ [ 1109.725039] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f34456-6a9c-2e6d-3237-df52a42aef60" [ 1109.725039] env[61978]: _type = "Task" [ 1109.725039] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.730121] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928aa2b3-479f-44e9-8f0b-5319eb83e184 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.749946] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f34456-6a9c-2e6d-3237-df52a42aef60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.751252] env[61978]: DEBUG nova.compute.provider_tree [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1109.769581] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395379, 'name': ReconfigVM_Task, 'duration_secs': 0.482337} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.769923] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1109.770231] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-894eb399-a54b-448d-ae54-8bd40e2d7c2c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.775306] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 7e6178cf-b7be-46f8-8f8c-8605a09703c7] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1109.778944] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1109.778944] env[61978]: value = "task-1395380" [ 1109.778944] env[61978]: _type = "Task" [ 1109.778944] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.790336] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395380, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.920930] env[61978]: DEBUG oslo_concurrency.lockutils [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquiring lock "b932d221-aca9-4853-aa9c-2d27981e878c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.920930] env[61978]: DEBUG oslo_concurrency.lockutils [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "b932d221-aca9-4853-aa9c-2d27981e878c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.921933] env[61978]: DEBUG oslo_concurrency.lockutils [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquiring lock "b932d221-aca9-4853-aa9c-2d27981e878c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.921933] env[61978]: DEBUG oslo_concurrency.lockutils [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "b932d221-aca9-4853-aa9c-2d27981e878c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.921933] env[61978]: DEBUG oslo_concurrency.lockutils [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "b932d221-aca9-4853-aa9c-2d27981e878c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.923728] env[61978]: INFO nova.compute.manager [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Terminating instance [ 1109.925488] env[61978]: DEBUG oslo_concurrency.lockutils [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquiring lock "refresh_cache-b932d221-aca9-4853-aa9c-2d27981e878c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1109.925695] env[61978]: DEBUG oslo_concurrency.lockutils [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquired lock "refresh_cache-b932d221-aca9-4853-aa9c-2d27981e878c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.925875] env[61978]: DEBUG nova.network.neutron [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1110.239307] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f34456-6a9c-2e6d-3237-df52a42aef60, 'name': SearchDatastore_Task, 'duration_secs': 0.01884} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.239678] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1110.240255] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] c861eaa2-1c57-476f-92b3-886c8e44f6b4/c861eaa2-1c57-476f-92b3-886c8e44f6b4.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1110.240601] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eaf0acbd-1e07-4592-ae43-64f41edcea0f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.249662] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for the task: (returnval){ [ 1110.249662] env[61978]: value = "task-1395381" [ 1110.249662] env[61978]: _type = "Task" [ 1110.249662] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.258225] env[61978]: DEBUG nova.scheduler.client.report [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1110.265528] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395381, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.280571] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: f3a9f204-e4ed-49f1-85ef-8cea7377cf89] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1110.294792] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395380, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.445700] env[61978]: DEBUG nova.network.neutron [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1110.566912] env[61978]: DEBUG nova.network.neutron [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.771181] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.027s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1110.771181] env[61978]: DEBUG nova.compute.manager [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1110.776700] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395381, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.776700] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.134s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.776700] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1110.784366] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.129s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.784366] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.002s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1110.788664] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.526s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.789020] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.002s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1110.791941] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 17.036s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.796636] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 3ee1023c-7837-4db0-88d4-f88c9a43fba3] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1110.812864] env[61978]: DEBUG oslo_vmware.api [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 
tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395380, 'name': PowerOnVM_Task, 'duration_secs': 0.615517} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.814082] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1110.819266] env[61978]: DEBUG nova.compute.manager [None req-0edf76ee-b57e-4fa8-a928-110fa7968719 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1110.819751] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ee92cae-7f53-412d-b9bc-9b312df3e2ae {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.826154] env[61978]: INFO nova.scheduler.client.report [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Deleted allocations for instance f1001633-e4e5-4de1-8a6b-cf653e43d821 [ 1110.838149] env[61978]: INFO nova.scheduler.client.report [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Deleted allocations for instance b356fc81-f857-4416-8eb0-28c66d137967 [ 1110.850790] env[61978]: INFO nova.scheduler.client.report [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Deleted allocations for instance b26a4784-698d-477a-8db7-58156899d231 [ 1111.069845] env[61978]: DEBUG oslo_concurrency.lockutils [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Releasing lock "refresh_cache-b932d221-aca9-4853-aa9c-2d27981e878c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1111.069845] env[61978]: DEBUG nova.compute.manager [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1111.069845] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1111.072272] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376678a2-af16-4ebb-8505-30e0cf192da0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.081807] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1111.085015] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6cdd1426-1c19-41b9-b284-e251f2e3b2d8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.090442] env[61978]: DEBUG oslo_vmware.api [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1111.090442] env[61978]: value = "task-1395382" [ 1111.090442] env[61978]: _type = "Task" [ 1111.090442] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.100948] env[61978]: DEBUG oslo_vmware.api [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395382, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.267502] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395381, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.755036} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.267832] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] c861eaa2-1c57-476f-92b3-886c8e44f6b4/c861eaa2-1c57-476f-92b3-886c8e44f6b4.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1111.268179] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1111.268529] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-63f3e5c7-915a-4069-b0d0-0d75916ed8d4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.278221] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for the task: (returnval){ [ 1111.278221] env[61978]: value = "task-1395383" [ 1111.278221] env[61978]: _type = "Task" [ 1111.278221] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.287920] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395383, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.296824] env[61978]: DEBUG nova.compute.utils [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1111.304822] env[61978]: DEBUG nova.compute.manager [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1111.304822] env[61978]: DEBUG nova.network.neutron [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1111.307822] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 5d9556d2-fcdd-416f-8f16-0fb271ff4ca5] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1111.359440] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8080acca-e9af-4020-aecf-1a5c073146ad tempest-ServersTestFqdnHostnames-1581446948 tempest-ServersTestFqdnHostnames-1581446948-project-member] Lock "f1001633-e4e5-4de1-8a6b-cf653e43d821" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.107s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.367426] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0bd88040-8206-4f20-83b1-addaa8f80182 tempest-ListImageFiltersTestJSON-230309819 tempest-ListImageFiltersTestJSON-230309819-project-member] Lock "b356fc81-f857-4416-8eb0-28c66d137967" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.146s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.369730] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d15e7a81-b80a-4c80-9fee-fafd711ad6ec tempest-MigrationsAdminTest-706431539 tempest-MigrationsAdminTest-706431539-project-member] Lock "b26a4784-698d-477a-8db7-58156899d231" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.913s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.419860] env[61978]: DEBUG nova.policy [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ab697d6ab4e4ece8b290afbf5ec1366', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a33ac41ae0247b59c400c6ed9145239', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1111.605402] env[61978]: DEBUG oslo_vmware.api [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395382, 'name': PowerOffVM_Task, 'duration_secs': 0.227924} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.605402] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1111.605402] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1111.605512] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a4076e11-e0a3-4ada-861e-b06454503ad2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.642169] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1111.643613] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1111.643613] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Deleting the datastore file [datastore2] b932d221-aca9-4853-aa9c-2d27981e878c {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1111.643613] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b30a7bbc-db90-4f85-b16e-d2c115f7138d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.651787] env[61978]: DEBUG oslo_vmware.api [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for the task: (returnval){ [ 1111.651787] env[61978]: value = "task-1395385" [ 1111.651787] env[61978]: _type = "Task" [ 1111.651787] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.660531] env[61978]: DEBUG oslo_vmware.api [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395385, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.694346] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793cf586-aaa1-4146-8b96-4d25d03c151f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.704833] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5751efc-ae32-40ea-9cdb-190cc92866ef {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.738917] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5e7742a-ba41-4f81-b8a6-8e5ba4875cf2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.747546] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c84cc58-6458-47ac-b4bf-8ea93d8d92b2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.764706] env[61978]: DEBUG nova.compute.provider_tree [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1111.797414] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395383, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078625} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.798952] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1111.800547] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb6cf62-04e2-4ef3-802d-103584540688 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.824111] env[61978]: DEBUG nova.compute.manager [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1111.827799] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: adf25af8-28c4-444e-b849-88d643f57dcf] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1111.843526] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] c861eaa2-1c57-476f-92b3-886c8e44f6b4/c861eaa2-1c57-476f-92b3-886c8e44f6b4.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1111.843886] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07841e78-ca0c-437d-b460-638f4886661f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.868243] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for the task: (returnval){ [ 1111.868243] env[61978]: value = "task-1395386" [ 1111.868243] env[61978]: _type = "Task" [ 1111.868243] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.881174] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395386, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.163454] env[61978]: DEBUG oslo_vmware.api [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Task: {'id': task-1395385, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.324346} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.163760] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1112.163964] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1112.164168] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1112.164346] env[61978]: INFO nova.compute.manager [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1112.164595] env[61978]: DEBUG oslo.service.loopingcall [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1112.164798] env[61978]: DEBUG nova.compute.manager [-] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1112.164893] env[61978]: DEBUG nova.network.neutron [-] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1112.256057] env[61978]: DEBUG nova.network.neutron [-] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1112.286054] env[61978]: ERROR nova.scheduler.client.report [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [req-5cfd730d-504f-47e2-8441-be75adeab87d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 44209228-3464-48ae-bc40-83eccd44b0cf. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5cfd730d-504f-47e2-8441-be75adeab87d"}]} [ 1112.305889] env[61978]: DEBUG nova.scheduler.client.report [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Refreshing inventories for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1112.335545] env[61978]: DEBUG nova.scheduler.client.report [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Updating ProviderTree inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1112.336264] env[61978]: DEBUG nova.compute.provider_tree [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1112.347294] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: dbd5bf18-4cf7-46c4-9a8f-7ecd87f5ed56] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1112.353507] env[61978]: DEBUG nova.scheduler.client.report [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Refreshing aggregate associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, aggregates: None {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1112.380267] env[61978]: DEBUG nova.scheduler.client.report [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Refreshing trait associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1112.388312] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 
tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395386, 'name': ReconfigVM_Task, 'duration_secs': 0.315819} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.389515] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Reconfigured VM instance instance-00000045 to attach disk [datastore2] c861eaa2-1c57-476f-92b3-886c8e44f6b4/c861eaa2-1c57-476f-92b3-886c8e44f6b4.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1112.392358] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ddc010b0-e045-4ad0-b80e-3995e93df82d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.401736] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for the task: (returnval){ [ 1112.401736] env[61978]: value = "task-1395387" [ 1112.401736] env[61978]: _type = "Task" [ 1112.401736] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.415728] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395387, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.686208] env[61978]: DEBUG nova.network.neutron [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Successfully created port: 28538b34-2ffa-4e6e-a451-0654e6ec063d {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1112.749546] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d5d9c36-a214-476b-9dce-ec91091d98e4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.759438] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b4fe4ab-3d00-4f59-a8da-3e1d206630e1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.762566] env[61978]: DEBUG nova.network.neutron [-] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.799292] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf7ee39-e8f2-4c1c-9297-9da5cc599de0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.809277] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c8087e-d16e-453f-b3b4-39688e8f8fd9 {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.829101] env[61978]: DEBUG nova.compute.provider_tree [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1112.850527] env[61978]: DEBUG nova.compute.manager [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1112.852771] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 7e71c8de-1f94-4161-8ad8-a67792c5ce24] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1112.892457] env[61978]: DEBUG nova.virt.hardware [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1112.892759] env[61978]: DEBUG nova.virt.hardware [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1112.893112] env[61978]: DEBUG nova.virt.hardware [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1112.893112] env[61978]: DEBUG nova.virt.hardware [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1112.894129] env[61978]: DEBUG nova.virt.hardware [None 
req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1112.894129] env[61978]: DEBUG nova.virt.hardware [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1112.895617] env[61978]: DEBUG nova.virt.hardware [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1112.895720] env[61978]: DEBUG nova.virt.hardware [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1112.895942] env[61978]: DEBUG nova.virt.hardware [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1112.896201] env[61978]: DEBUG nova.virt.hardware [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1112.896402] env[61978]: DEBUG nova.virt.hardware [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1112.896936] env[61978]: INFO nova.compute.manager [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Unrescuing [ 1112.897235] env[61978]: DEBUG oslo_concurrency.lockutils [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquiring lock "refresh_cache-cf6d8815-ed87-4629-9df9-6f406ac2fe6e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1112.897403] env[61978]: DEBUG oslo_concurrency.lockutils [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquired lock "refresh_cache-cf6d8815-ed87-4629-9df9-6f406ac2fe6e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.897576] env[61978]: DEBUG nova.network.neutron [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 
tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1112.900566] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020c9f2f-db1e-4e0d-8782-070802b98d71 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.917920] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa66e3aa-c2da-4e60-90ef-996a7dae610f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.922186] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395387, 'name': Rename_Task, 'duration_secs': 0.184389} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.923314] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1112.924066] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-923a04cf-2f64-4ee4-ad55-594c2d9b5fc8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.945277] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for the task: (returnval){ [ 1112.945277] env[61978]: value = "task-1395388" [ 1112.945277] env[61978]: _type = "Task" [ 1112.945277] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.957113] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395388, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.267278] env[61978]: INFO nova.compute.manager [-] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Took 1.10 seconds to deallocate network for instance. 
[ 1113.358887] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 0d48ae5d-7cc8-42b3-a993-44636e9cb171] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1113.408290] env[61978]: DEBUG nova.scheduler.client.report [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Updated inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf with generation 90 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1113.408570] env[61978]: DEBUG nova.compute.provider_tree [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Updating resource provider 44209228-3464-48ae-bc40-83eccd44b0cf generation from 90 to 91 during operation: update_inventory {{(pid=61978) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1113.411204] env[61978]: DEBUG nova.compute.provider_tree [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1113.461285] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395388, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.774691] env[61978]: DEBUG oslo_concurrency.lockutils [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1113.854808] env[61978]: DEBUG nova.network.neutron [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Updating instance_info_cache with network_info: [{"id": "e8b9a7d5-25ce-419f-b3e9-9179be86e340", "address": "fa:16:3e:6c:d3:d1", "network": {"id": "a4bfbe6c-0ede-463d-935f-52cc2f1e3692", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1615190107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3c3fe7c7f560427db0f814a2c67bb527", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5215e5b-294b-4e8c-bd06-355e9955ab1d", "external-id": "nsx-vlan-transportzone-529", "segmentation_id": 529, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8b9a7d5-25", "ovs_interfaceid": "e8b9a7d5-25ce-419f-b3e9-9179be86e340", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.865780] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: aeb1a40d-0c5c-4e1c-8922-4575f4a3e4ba] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1113.966214] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395388, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.359739] env[61978]: DEBUG oslo_concurrency.lockutils [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Releasing lock "refresh_cache-cf6d8815-ed87-4629-9df9-6f406ac2fe6e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1114.360474] env[61978]: DEBUG nova.objects.instance [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lazy-loading 'flavor' on Instance uuid cf6d8815-ed87-4629-9df9-6f406ac2fe6e {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1114.371201] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 2c1ce021-255f-454d-ba0e-c85380f3e973] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1114.429260] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.637s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1114.429463] env[61978]: DEBUG nova.compute.manager [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=61978) _confirm_resize /opt/stack/nova/nova/compute/manager.py:4909}} [ 1114.432967] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.730s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.433383] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1114.435505] env[61978]: DEBUG oslo_concurrency.lockutils [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.748s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.435719] env[61978]: DEBUG oslo_concurrency.lockutils [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1114.437610] env[61978]: DEBUG oslo_concurrency.lockutils [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.904s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.439220] env[61978]: INFO nova.compute.claims [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1114.464759] env[61978]: DEBUG oslo_vmware.api [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395388, 'name': PowerOnVM_Task, 'duration_secs': 1.220509} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.465060] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1114.465285] env[61978]: INFO nova.compute.manager [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Took 13.20 seconds to spawn the instance on the hypervisor. 
[ 1114.465469] env[61978]: DEBUG nova.compute.manager [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1114.467288] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c4f4e5-42f8-40f3-af4b-7a5cceb12c39 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.472717] env[61978]: INFO nova.scheduler.client.report [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Deleted allocations for instance f3c837fb-be7e-40a6-aae4-7f213c62ab2c [ 1114.485175] env[61978]: INFO nova.scheduler.client.report [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Deleted allocations for instance fdd0c16d-b0f8-4f81-9069-34d11f273acb [ 1114.609066] env[61978]: DEBUG nova.compute.manager [req-cb8b353c-3743-40e3-9b0e-893c27968654 req-30b71695-5b92-4b87-924f-b74d1897c93b service nova] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Received event network-vif-plugged-28538b34-2ffa-4e6e-a451-0654e6ec063d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1114.609311] env[61978]: DEBUG oslo_concurrency.lockutils [req-cb8b353c-3743-40e3-9b0e-893c27968654 req-30b71695-5b92-4b87-924f-b74d1897c93b service nova] Acquiring lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.609535] env[61978]: DEBUG oslo_concurrency.lockutils [req-cb8b353c-3743-40e3-9b0e-893c27968654 req-30b71695-5b92-4b87-924f-b74d1897c93b service nova] Lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.609813] env[61978]: DEBUG oslo_concurrency.lockutils [req-cb8b353c-3743-40e3-9b0e-893c27968654 req-30b71695-5b92-4b87-924f-b74d1897c93b service nova] Lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1114.609912] env[61978]: DEBUG nova.compute.manager [req-cb8b353c-3743-40e3-9b0e-893c27968654 req-30b71695-5b92-4b87-924f-b74d1897c93b service nova] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] No waiting events found dispatching network-vif-plugged-28538b34-2ffa-4e6e-a451-0654e6ec063d {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1114.610313] env[61978]: WARNING nova.compute.manager [req-cb8b353c-3743-40e3-9b0e-893c27968654 req-30b71695-5b92-4b87-924f-b74d1897c93b service nova] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Received unexpected event network-vif-plugged-28538b34-2ffa-4e6e-a451-0654e6ec063d for instance with vm_state building and task_state spawning. 
[ 1114.858308] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Acquiring lock "38e4f039-20bc-4bed-b449-227bde070ed9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.858679] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Lock "38e4f039-20bc-4bed-b449-227bde070ed9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.868397] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5089009-165b-4b00-b73f-7ba20dc7bd33 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.874093] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: cb004a19-0048-4766-af7c-0fbde867f422] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1114.899235] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1114.901797] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d34f97a9-c0be-4228-b28a-4176008f9c8d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.909961] env[61978]: DEBUG oslo_vmware.api [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1114.909961] env[61978]: value = "task-1395389" [ 1114.909961] env[61978]: _type = "Task" [ 1114.909961] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.919983] env[61978]: DEBUG oslo_vmware.api [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395389, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.002447] env[61978]: INFO nova.compute.manager [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Took 41.10 seconds to build instance. 
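(Illustrative note, not part of the captured log.) The "Waiting for the task ... to complete" / "progress is N%" / "completed successfully" sequences above are a poll-until-done loop around a vCenter task handle (PowerOffVM_Task, PowerOnVM_Task, ...), with the elapsed time reported as duration_secs. A minimal sketch of that pattern; get_task_info is a placeholder callable, not a real oslo.vmware API:

import time

def wait_for_task(get_task_info, poll_interval=0.5, log=print):
    """Poll a task until it succeeds or errors; return its duration in seconds."""
    start = time.monotonic()
    while True:
        info = get_task_info()        # expected keys: 'state', 'progress', 'error'
        if info["state"] == "success":
            duration = time.monotonic() - start
            log("Task completed successfully in %.3fs" % duration)
            return duration
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        log("Task progress is %s%%" % info.get("progress", 0))
        time.sleep(poll_interval)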
[ 1115.003650] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7820223a-2499-4d31-bbf5-a98fb2656962 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "f3c837fb-be7e-40a6-aae4-7f213c62ab2c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.278s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.004626] env[61978]: DEBUG oslo_concurrency.lockutils [None req-70c0ef78-dc2a-40e7-b64a-2ad8f14ce7ed tempest-ServerMetadataTestJSON-51903539 tempest-ServerMetadataTestJSON-51903539-project-member] Lock "fdd0c16d-b0f8-4f81-9069-34d11f273acb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.276s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.050798] env[61978]: INFO nova.scheduler.client.report [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Deleted allocation for migration dfe6158b-18dd-4d7f-8f9a-1d1b35f9479f [ 1115.102606] env[61978]: DEBUG nova.network.neutron [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Successfully updated port: 28538b34-2ffa-4e6e-a451-0654e6ec063d {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1115.361431] env[61978]: DEBUG nova.compute.manager [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1115.378162] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: f930ab49-c215-4b2e-92b1-21c0d52a70eb] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1115.431697] env[61978]: DEBUG oslo_vmware.api [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395389, 'name': PowerOffVM_Task, 'duration_secs': 0.281013} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.431988] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1115.439264] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Reconfiguring VM instance instance-00000044 to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1115.440781] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34e5b1bb-44a4-4180-8289-b085d9d46989 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.468181] env[61978]: DEBUG oslo_vmware.api [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1115.468181] env[61978]: value = "task-1395390" [ 1115.468181] env[61978]: _type = "Task" [ 1115.468181] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.477525] env[61978]: DEBUG oslo_vmware.api [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395390, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.507796] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ee81833f-03a4-4489-b87d-d5d43dbd0957 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lock "c861eaa2-1c57-476f-92b3-886c8e44f6b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.188s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.558150] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3ea1f043-fda4-4f40-9d22-381b09983a09 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "c17c986e-c008-4414-8dd1-4ea836458048" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 24.872s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.605935] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "refresh_cache-ae6b92bb-6f79-4b52-bdb7-095985bf2fad" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1115.606217] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired lock "refresh_cache-ae6b92bb-6f79-4b52-bdb7-095985bf2fad" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.606434] env[61978]: DEBUG nova.network.neutron [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1115.805090] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7aee5b2-93b0-4cc5-b96a-582d63403112 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.814044] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c508ce5f-4047-49da-9393-9055d67d69a0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.846953] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b570775-3142-4a9a-981f-21351533ccd6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.855916] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c12126-c9d5-4117-b9c9-a4c659a63a55 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.878902] env[61978]: DEBUG nova.compute.provider_tree [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf 
{{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1115.885242] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 8a21e6a7-c34e-4af0-b1fd-8a501694614c] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1115.900714] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1115.982056] env[61978]: DEBUG oslo_vmware.api [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395390, 'name': ReconfigVM_Task, 'duration_secs': 0.294771} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.982670] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Reconfigured VM instance instance-00000044 to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1115.982670] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1115.982670] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3f19b13a-685c-4f26-9b87-ae9a3fb8b5dd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.991760] env[61978]: DEBUG oslo_vmware.api [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1115.991760] env[61978]: value = "task-1395391" [ 1115.991760] env[61978]: _type = "Task" [ 1115.991760] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.005714] env[61978]: DEBUG oslo_vmware.api [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395391, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.091959] env[61978]: DEBUG nova.objects.instance [None req-e3b7a972-3055-4774-9f2c-5bf39da8530d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lazy-loading 'flavor' on Instance uuid c17c986e-c008-4414-8dd1-4ea836458048 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1116.284504] env[61978]: DEBUG nova.network.neutron [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1116.382605] env[61978]: DEBUG nova.scheduler.client.report [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1116.389954] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: ea1c2d74-70b4-4547-a887-78e291c3082a] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1116.502989] env[61978]: DEBUG oslo_vmware.api [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395391, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.599157] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e3b7a972-3055-4774-9f2c-5bf39da8530d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "refresh_cache-c17c986e-c008-4414-8dd1-4ea836458048" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1116.599878] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e3b7a972-3055-4774-9f2c-5bf39da8530d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired lock "refresh_cache-c17c986e-c008-4414-8dd1-4ea836458048" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.600274] env[61978]: DEBUG nova.network.neutron [None req-e3b7a972-3055-4774-9f2c-5bf39da8530d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1116.600833] env[61978]: DEBUG nova.objects.instance [None req-e3b7a972-3055-4774-9f2c-5bf39da8530d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lazy-loading 'info_cache' on Instance uuid c17c986e-c008-4414-8dd1-4ea836458048 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1116.700320] env[61978]: DEBUG nova.compute.manager [req-d722d068-dc1e-4234-b3ec-93cf49a070d4 req-7a6039ef-c322-4f12-b865-5c037f8da0c1 service nova] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Received event network-changed-28538b34-2ffa-4e6e-a451-0654e6ec063d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1116.700992] env[61978]: DEBUG nova.compute.manager [req-d722d068-dc1e-4234-b3ec-93cf49a070d4 req-7a6039ef-c322-4f12-b865-5c037f8da0c1 service nova] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Refreshing instance network info cache due to event network-changed-28538b34-2ffa-4e6e-a451-0654e6ec063d. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1116.701369] env[61978]: DEBUG oslo_concurrency.lockutils [req-d722d068-dc1e-4234-b3ec-93cf49a070d4 req-7a6039ef-c322-4f12-b865-5c037f8da0c1 service nova] Acquiring lock "refresh_cache-ae6b92bb-6f79-4b52-bdb7-095985bf2fad" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1116.702633] env[61978]: DEBUG nova.network.neutron [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Updating instance_info_cache with network_info: [{"id": "28538b34-2ffa-4e6e-a451-0654e6ec063d", "address": "fa:16:3e:67:e7:1c", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28538b34-2f", "ovs_interfaceid": "28538b34-2ffa-4e6e-a451-0654e6ec063d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.798037] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquiring lock "c861eaa2-1c57-476f-92b3-886c8e44f6b4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1116.798037] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lock "c861eaa2-1c57-476f-92b3-886c8e44f6b4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1116.798037] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquiring lock "c861eaa2-1c57-476f-92b3-886c8e44f6b4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1116.798037] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lock 
"c861eaa2-1c57-476f-92b3-886c8e44f6b4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1116.798037] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lock "c861eaa2-1c57-476f-92b3-886c8e44f6b4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.801827] env[61978]: INFO nova.compute.manager [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Terminating instance [ 1116.807397] env[61978]: DEBUG nova.compute.manager [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1116.807963] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1116.810024] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f50b3f-1f0f-45c8-b24f-b4466f1af54a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.823389] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1116.823989] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dc78bdb0-5856-46cd-b70a-709788bb0cb2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.834019] env[61978]: DEBUG oslo_vmware.api [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for the task: (returnval){ [ 1116.834019] env[61978]: value = "task-1395392" [ 1116.834019] env[61978]: _type = "Task" [ 1116.834019] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.844772] env[61978]: DEBUG oslo_vmware.api [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395392, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.896023] env[61978]: DEBUG oslo_concurrency.lockutils [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.455s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.896023] env[61978]: DEBUG nova.compute.manager [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1116.902019] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 3a30ecc4-455f-49cf-98e8-d38be6a1c5a5] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1116.902019] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.130s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1116.907686] env[61978]: INFO nova.compute.claims [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1117.004012] env[61978]: DEBUG oslo_vmware.api [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395391, 'name': PowerOnVM_Task, 'duration_secs': 0.707175} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.004335] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1117.004614] env[61978]: DEBUG nova.compute.manager [None req-10bbe35a-e630-4e72-9d96-544a99638f9e tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1117.005564] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49df02a3-fe70-4e71-86cd-20313d0c4f8d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.107605] env[61978]: DEBUG nova.objects.base [None req-e3b7a972-3055-4774-9f2c-5bf39da8530d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1117.207706] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Releasing lock "refresh_cache-ae6b92bb-6f79-4b52-bdb7-095985bf2fad" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1117.207706] env[61978]: DEBUG nova.compute.manager [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Instance network_info: |[{"id": "28538b34-2ffa-4e6e-a451-0654e6ec063d", "address": "fa:16:3e:67:e7:1c", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28538b34-2f", "ovs_interfaceid": "28538b34-2ffa-4e6e-a451-0654e6ec063d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1117.207706] env[61978]: DEBUG oslo_concurrency.lockutils [req-d722d068-dc1e-4234-b3ec-93cf49a070d4 req-7a6039ef-c322-4f12-b865-5c037f8da0c1 service nova] Acquired lock "refresh_cache-ae6b92bb-6f79-4b52-bdb7-095985bf2fad" {{(pid=61978) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.207706] env[61978]: DEBUG nova.network.neutron [req-d722d068-dc1e-4234-b3ec-93cf49a070d4 req-7a6039ef-c322-4f12-b865-5c037f8da0c1 service nova] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Refreshing network info cache for port 28538b34-2ffa-4e6e-a451-0654e6ec063d {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1117.212398] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:e7:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '28538b34-2ffa-4e6e-a451-0654e6ec063d', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1117.225801] env[61978]: DEBUG oslo.service.loopingcall [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1117.227289] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1117.227289] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-774c3f1c-030b-47b2-aad7-71a947183602 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.251101] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1117.251101] env[61978]: value = "task-1395393" [ 1117.251101] env[61978]: _type = "Task" [ 1117.251101] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.263693] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395393, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.345656] env[61978]: DEBUG oslo_vmware.api [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395392, 'name': PowerOffVM_Task, 'duration_secs': 0.276711} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.346247] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1117.346654] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1117.347070] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-370bb3dc-f1ed-46fa-b0d5-0c20fa20127d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.416152] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: dd686727-fc33-4dc4-b386-aabec27cf215] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1117.416594] env[61978]: DEBUG nova.compute.utils [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1117.420863] env[61978]: DEBUG nova.compute.manager [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1117.421179] env[61978]: DEBUG nova.network.neutron [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1117.524027] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1117.524027] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1117.524027] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Deleting the datastore file [datastore2] c861eaa2-1c57-476f-92b3-886c8e44f6b4 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1117.524468] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0825574-5c86-407f-ae09-8027d7b83139 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.553275] env[61978]: DEBUG oslo_vmware.api [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for the task: (returnval){ [ 1117.553275] env[61978]: value = "task-1395395" [ 1117.553275] env[61978]: _type = "Task" [ 1117.553275] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.563022] env[61978]: DEBUG oslo_vmware.api [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395395, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.566652] env[61978]: DEBUG nova.policy [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '394d03fc54234c369ad2e1255eee9c82', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c31ffdd4e70d40ecbbb56777f9422a52', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1117.766629] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395393, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.921203] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: b7fc5ee0-7671-40c7-8d12-bacdd4b57fa3] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1117.923375] env[61978]: DEBUG nova.compute.manager [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1118.063786] env[61978]: DEBUG oslo_vmware.api [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Task: {'id': task-1395395, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.483499} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.063897] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1118.064326] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1118.064613] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1118.065303] env[61978]: INFO nova.compute.manager [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1118.065303] env[61978]: DEBUG oslo.service.loopingcall [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1118.065411] env[61978]: DEBUG nova.compute.manager [-] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1118.065459] env[61978]: DEBUG nova.network.neutron [-] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1118.184969] env[61978]: DEBUG nova.network.neutron [req-d722d068-dc1e-4234-b3ec-93cf49a070d4 req-7a6039ef-c322-4f12-b865-5c037f8da0c1 service nova] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Updated VIF entry in instance network info cache for port 28538b34-2ffa-4e6e-a451-0654e6ec063d. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1118.185942] env[61978]: DEBUG nova.network.neutron [req-d722d068-dc1e-4234-b3ec-93cf49a070d4 req-7a6039ef-c322-4f12-b865-5c037f8da0c1 service nova] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Updating instance_info_cache with network_info: [{"id": "28538b34-2ffa-4e6e-a451-0654e6ec063d", "address": "fa:16:3e:67:e7:1c", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28538b34-2f", "ovs_interfaceid": "28538b34-2ffa-4e6e-a451-0654e6ec063d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.265395] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395393, 'name': CreateVM_Task, 'duration_secs': 0.710456} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.265565] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1118.267526] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1118.267526] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.267903] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1118.271956] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddc4859b-107f-4a67-84d6-36610f429f7f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.276943] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1118.276943] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52958325-4c28-02aa-cb15-08c7e82ac89c" [ 1118.276943] env[61978]: _type = "Task" [ 1118.276943] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.295602] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52958325-4c28-02aa-cb15-08c7e82ac89c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.434022] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: f22e097d-f1a5-414a-82cc-ab455db876c7] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1118.456421] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c47b506-ef63-409a-8ab3-1ad0f639b979 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.466017] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce090f92-6314-45a9-8033-1e0d006d19c4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.506859] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca7ad1c-cc69-4580-bb7f-8a52b3347989 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.516172] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07895d98-d5c4-4ede-9908-e14a75556eeb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.532959] env[61978]: DEBUG nova.compute.provider_tree [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1118.596145] env[61978]: DEBUG nova.network.neutron [None req-e3b7a972-3055-4774-9f2c-5bf39da8530d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Updating instance_info_cache with network_info: [{"id": "394a8251-684b-4ddc-ae5c-7ef7ec06b503", "address": "fa:16:3e:5c:ce:8c", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap394a8251-68", 
"ovs_interfaceid": "394a8251-684b-4ddc-ae5c-7ef7ec06b503", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.635700] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquiring lock "cf6d8815-ed87-4629-9df9-6f406ac2fe6e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1118.636058] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lock "cf6d8815-ed87-4629-9df9-6f406ac2fe6e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1118.636317] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquiring lock "cf6d8815-ed87-4629-9df9-6f406ac2fe6e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1118.636554] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lock "cf6d8815-ed87-4629-9df9-6f406ac2fe6e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1118.636764] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lock "cf6d8815-ed87-4629-9df9-6f406ac2fe6e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1118.639448] env[61978]: INFO nova.compute.manager [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Terminating instance [ 1118.641200] env[61978]: DEBUG nova.compute.manager [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1118.641443] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1118.642366] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec8bb61-9de7-46aa-b5a5-ddb209d7ecf7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.652012] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1118.652310] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6f9c6fb-0aff-4888-b29c-bcae6aca5ca2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.661142] env[61978]: DEBUG oslo_vmware.api [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1118.661142] env[61978]: value = "task-1395396" [ 1118.661142] env[61978]: _type = "Task" [ 1118.661142] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.672825] env[61978]: DEBUG oslo_vmware.api [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395396, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.689239] env[61978]: DEBUG oslo_concurrency.lockutils [req-d722d068-dc1e-4234-b3ec-93cf49a070d4 req-7a6039ef-c322-4f12-b865-5c037f8da0c1 service nova] Releasing lock "refresh_cache-ae6b92bb-6f79-4b52-bdb7-095985bf2fad" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1118.729515] env[61978]: DEBUG nova.network.neutron [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Successfully created port: ddce6e3c-0596-4fb1-81fc-7ad5823e1f15 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1118.793203] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52958325-4c28-02aa-cb15-08c7e82ac89c, 'name': SearchDatastore_Task, 'duration_secs': 0.025061} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.793430] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1118.793682] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1118.793936] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1118.794202] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.794304] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1118.794583] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ace4da75-8075-4603-ad74-a66672575b26 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.806980] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1118.807199] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1118.808653] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1fc0b49-0bc7-4888-bc8f-56f888e62fc9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.816334] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1118.816334] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ab22ad-5c17-5d89-6056-03552bb274ca" [ 1118.816334] env[61978]: _type = "Task" [ 1118.816334] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.831177] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ab22ad-5c17-5d89-6056-03552bb274ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.939543] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 78b78ae7-74fe-4403-be9b-229abe6a7353] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1118.944695] env[61978]: DEBUG nova.compute.manager [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1118.981272] env[61978]: DEBUG nova.virt.hardware [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='40599cb946e414ff3d81a7a2471f8f96',container_format='bare',created_at=2024-11-04T15:07:25Z,direct_url=,disk_format='vmdk',id=2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a,min_disk=1,min_ram=0,name='tempest-test-snap-359082124',owner='c31ffdd4e70d40ecbbb56777f9422a52',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-11-04T15:07:40Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1118.981972] env[61978]: DEBUG nova.virt.hardware [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1118.982521] env[61978]: DEBUG nova.virt.hardware [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1118.982898] env[61978]: DEBUG nova.virt.hardware [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1118.983218] env[61978]: DEBUG nova.virt.hardware [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1118.983915] env[61978]: DEBUG nova.virt.hardware [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1118.984286] env[61978]: DEBUG nova.virt.hardware [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1118.984561] env[61978]: DEBUG nova.virt.hardware [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1118.984921] env[61978]: DEBUG nova.virt.hardware [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Got 1 
possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1118.985242] env[61978]: DEBUG nova.virt.hardware [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1118.985571] env[61978]: DEBUG nova.virt.hardware [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1118.986644] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d4354b-cf77-4f70-9b95-6b38fcba0026 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.000190] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9470dc75-fb2b-45d3-b277-1eb746d181b7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.056106] env[61978]: ERROR nova.scheduler.client.report [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [req-42446545-1423-4019-805f-497dd44ad662] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 44209228-3464-48ae-bc40-83eccd44b0cf. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-42446545-1423-4019-805f-497dd44ad662"}]} [ 1119.081087] env[61978]: DEBUG nova.scheduler.client.report [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Refreshing inventories for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1119.099422] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e3b7a972-3055-4774-9f2c-5bf39da8530d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lock "refresh_cache-c17c986e-c008-4414-8dd1-4ea836458048" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1119.106502] env[61978]: DEBUG nova.scheduler.client.report [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Updating ProviderTree inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1119.106774] env[61978]: DEBUG nova.compute.provider_tree [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1119.129013] env[61978]: DEBUG nova.scheduler.client.report [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Refreshing aggregate associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, aggregates: None {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1119.151415] env[61978]: DEBUG nova.scheduler.client.report [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Refreshing trait associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1119.181823] env[61978]: DEBUG oslo_vmware.api [None 
req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395396, 'name': PowerOffVM_Task, 'duration_secs': 0.269733} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.182137] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1119.182360] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1119.184297] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-beb92804-f210-49dc-b184-eed2c3263bde {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.256162] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1119.259020] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1119.259020] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Deleting the datastore file [datastore2] cf6d8815-ed87-4629-9df9-6f406ac2fe6e {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1119.259020] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f2d3bb0c-26ff-4e1e-82ad-2b4daceebafc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.267806] env[61978]: DEBUG oslo_vmware.api [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1119.267806] env[61978]: value = "task-1395398" [ 1119.267806] env[61978]: _type = "Task" [ 1119.267806] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.281857] env[61978]: DEBUG oslo_vmware.api [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395398, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.330983] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ab22ad-5c17-5d89-6056-03552bb274ca, 'name': SearchDatastore_Task, 'duration_secs': 0.014889} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.340789] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43be9429-827c-4e34-9aab-2eb86b863911 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.349826] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1119.349826] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f710dc-ec78-0858-b131-28863ef32d61" [ 1119.349826] env[61978]: _type = "Task" [ 1119.349826] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.365365] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f710dc-ec78-0858-b131-28863ef32d61, 'name': SearchDatastore_Task, 'duration_secs': 0.010701} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.365452] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1119.365731] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] ae6b92bb-6f79-4b52-bdb7-095985bf2fad/ae6b92bb-6f79-4b52-bdb7-095985bf2fad.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1119.366074] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4bd45873-3d7f-4fc5-82d3-b317bfa0f395 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.378057] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1119.378057] env[61978]: value = "task-1395399" [ 1119.378057] env[61978]: _type = "Task" [ 1119.378057] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.392054] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395399, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.414984] env[61978]: DEBUG nova.compute.manager [req-57a2952b-23f9-4233-8569-81a176f41c2a req-d0349561-9203-44eb-b61a-7d077611eb44 service nova] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Received event network-vif-deleted-4f64e24d-64d3-4410-8f23-96ae24053c6c {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1119.418401] env[61978]: INFO nova.compute.manager [req-57a2952b-23f9-4233-8569-81a176f41c2a req-d0349561-9203-44eb-b61a-7d077611eb44 service nova] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Neutron deleted interface 4f64e24d-64d3-4410-8f23-96ae24053c6c; detaching it from the instance and deleting it from the info cache [ 1119.418401] env[61978]: DEBUG nova.network.neutron [req-57a2952b-23f9-4233-8569-81a176f41c2a req-d0349561-9203-44eb-b61a-7d077611eb44 service nova] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Updating instance_info_cache with network_info: [{"id": "35ad6d98-9443-41d0-8356-d62f53f931fb", "address": "fa:16:3e:34:2f:ed", "network": {"id": "7ea59c49-3f2d-442c-9d6a-e0bccc3a5062", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-364359999", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.210", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "26cb7552530047c5867347d62195121e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35ad6d98-94", "ovs_interfaceid": "35ad6d98-9443-41d0-8356-d62f53f931fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.441553] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: b49d3ddb-5f8e-4ca8-8a3c-773c51b98f14] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1119.516466] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cb20ea2-e80f-4bd1-a35e-dfe7fc34e1ee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.524926] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d2f42e9-1370-4f1f-9a07-03835cdd1d4f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.558404] env[61978]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-168fb3ac-c832-415d-8b33-c47e1739eba0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.567734] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e494276b-62d6-4229-b461-c0270f7ac81e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.581534] env[61978]: DEBUG nova.compute.provider_tree [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1119.602816] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3b7a972-3055-4774-9f2c-5bf39da8530d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1119.603563] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dbace0d9-8dd1-47ed-9898-98203daa5cbf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.612456] env[61978]: DEBUG oslo_vmware.api [None req-e3b7a972-3055-4774-9f2c-5bf39da8530d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1119.612456] env[61978]: value = "task-1395400" [ 1119.612456] env[61978]: _type = "Task" [ 1119.612456] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.621685] env[61978]: DEBUG oslo_vmware.api [None req-e3b7a972-3055-4774-9f2c-5bf39da8530d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395400, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.779588] env[61978]: DEBUG oslo_vmware.api [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395398, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157897} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.779933] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1119.780155] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1119.780347] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1119.780534] env[61978]: INFO nova.compute.manager [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1119.780791] env[61978]: DEBUG oslo.service.loopingcall [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1119.781018] env[61978]: DEBUG nova.compute.manager [-] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1119.781222] env[61978]: DEBUG nova.network.neutron [-] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1119.888850] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395399, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.921750] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-17b7e2ae-0042-4696-9773-aa8ae6a0ae74 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.927333] env[61978]: DEBUG nova.network.neutron [-] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.940534] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc120d4-f33b-483c-938a-b2b2e6528a5f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.954236] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 32bcb974-8db9-43e2-b397-b497f3a4f30c] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1119.992065] env[61978]: DEBUG nova.compute.manager [req-57a2952b-23f9-4233-8569-81a176f41c2a req-d0349561-9203-44eb-b61a-7d077611eb44 service nova] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Detach interface failed, port_id=4f64e24d-64d3-4410-8f23-96ae24053c6c, reason: Instance c861eaa2-1c57-476f-92b3-886c8e44f6b4 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1120.123020] env[61978]: ERROR nova.scheduler.client.report [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [req-9f1f3254-4435-42a6-b0f8-3afd05be12c6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 44209228-3464-48ae-bc40-83eccd44b0cf. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9f1f3254-4435-42a6-b0f8-3afd05be12c6"}]} [ 1120.128307] env[61978]: DEBUG oslo_vmware.api [None req-e3b7a972-3055-4774-9f2c-5bf39da8530d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395400, 'name': PowerOnVM_Task, 'duration_secs': 0.494334} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.128697] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3b7a972-3055-4774-9f2c-5bf39da8530d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1120.128920] env[61978]: DEBUG nova.compute.manager [None req-e3b7a972-3055-4774-9f2c-5bf39da8530d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1120.132018] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a63cc7e-c343-431c-8d45-97255d5cdb84 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.152033] env[61978]: DEBUG nova.scheduler.client.report [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Refreshing inventories for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1120.154046] env[61978]: DEBUG oslo_concurrency.lockutils [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.154046] env[61978]: DEBUG oslo_concurrency.lockutils [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.177400] env[61978]: DEBUG nova.scheduler.client.report [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Updating ProviderTree inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1120.177701] env[61978]: DEBUG nova.compute.provider_tree [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1120.193927] env[61978]: DEBUG nova.scheduler.client.report [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Refreshing aggregate associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, aggregates: None {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1120.216431] env[61978]: DEBUG nova.scheduler.client.report [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Refreshing trait associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1120.391659] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395399, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.553899} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.395433] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] ae6b92bb-6f79-4b52-bdb7-095985bf2fad/ae6b92bb-6f79-4b52-bdb7-095985bf2fad.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1120.395772] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1120.396720] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fea7431a-9580-40c7-9a98-f052dd213af4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.406616] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1120.406616] env[61978]: value = "task-1395401" [ 1120.406616] env[61978]: _type = "Task" [ 1120.406616] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.422657] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395401, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.430341] env[61978]: INFO nova.compute.manager [-] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Took 2.36 seconds to deallocate network for instance. [ 1120.464665] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: e30d4a9f-1d75-453c-9552-2a0fbd4aa87d] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1120.583054] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223af0ad-b476-40e1-9a75-dbf1f77619f7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.592374] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-546a531a-2beb-4fa6-afcd-def4b6004356 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.628684] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a84a278-ae7e-4762-9dfd-15f16d8d82e3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.640507] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50802260-8ad9-4676-bf17-b6c7921f9abc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.657358] env[61978]: DEBUG nova.compute.manager [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1120.672339] env[61978]: DEBUG nova.compute.provider_tree [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1120.833142] env[61978]: DEBUG nova.network.neutron [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Successfully updated port: ddce6e3c-0596-4fb1-81fc-7ad5823e1f15 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1120.845330] env[61978]: DEBUG nova.network.neutron [-] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1120.916843] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395401, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071581} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.916843] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1120.917920] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a88feff-77e5-4a3a-8c7a-da93bf0d4ca2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.942297] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.952450] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] ae6b92bb-6f79-4b52-bdb7-095985bf2fad/ae6b92bb-6f79-4b52-bdb7-095985bf2fad.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1120.952792] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-e5aa3d41-c824-4ce5-8522-f6f0eefc16d2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.969389] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: ff793464-9bef-449f-8485-36d3b8fb1d69] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1120.980480] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1120.980480] env[61978]: value = "task-1395402" [ 1120.980480] env[61978]: _type = "Task" [ 1120.980480] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.990596] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395402, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.202619] env[61978]: DEBUG oslo_concurrency.lockutils [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.220899] env[61978]: DEBUG nova.scheduler.client.report [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Updated inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf with generation 94 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1121.221893] env[61978]: DEBUG nova.compute.provider_tree [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Updating resource provider 44209228-3464-48ae-bc40-83eccd44b0cf generation from 94 to 95 during operation: update_inventory {{(pid=61978) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1121.221893] env[61978]: DEBUG nova.compute.provider_tree [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} 
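[editorial note] The earlier 409 "placement.concurrent_update" errors, followed by the inventory refreshes and the successful update above ("generation from 94 to 95"), reflect Placement's optimistic concurrency: each inventory PUT carries the provider generation, and a stale generation is rejected so the client must re-read the provider and retry. A hedged sketch of such a retry loop against the Placement HTTP API follows; the session object, base_url, and max_retries are assumptions, and auth headers/microversion handling are omitted.

    import requests

    def set_inventory(session, base_url, provider_uuid, inventories, max_retries=3):
        """Update a resource provider's inventories, retrying on the 409
        generation conflict seen in the log. Sketch only."""
        url = f"{base_url}/resource_providers/{provider_uuid}"
        for _ in range(max_retries):
            # Re-read the provider to pick up its current generation.
            generation = session.get(url).json()["generation"]
            resp = session.put(
                f"{url}/inventories",
                json={"resource_provider_generation": generation,
                      "inventories": inventories},
            )
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: another writer bumped the
            # generation; loop, refresh, and try again.
        raise RuntimeError("inventory update kept conflicting; giving up")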
{{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1121.341898] env[61978]: DEBUG oslo_concurrency.lockutils [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "refresh_cache-de0f46af-870a-4095-a417-913a2c51f66b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1121.342078] env[61978]: DEBUG oslo_concurrency.lockutils [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquired lock "refresh_cache-de0f46af-870a-4095-a417-913a2c51f66b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.342242] env[61978]: DEBUG nova.network.neutron [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1121.350550] env[61978]: INFO nova.compute.manager [-] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Took 1.57 seconds to deallocate network for instance. [ 1121.450376] env[61978]: DEBUG nova.compute.manager [req-ad15ef65-a429-412c-b4ab-d5a4c8ae3ec4 req-4c2c4af9-1b80-4511-8edc-bfc6c460cead service nova] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Received event network-vif-deleted-35ad6d98-9443-41d0-8356-d62f53f931fb {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1121.450609] env[61978]: DEBUG nova.compute.manager [req-ad15ef65-a429-412c-b4ab-d5a4c8ae3ec4 req-4c2c4af9-1b80-4511-8edc-bfc6c460cead service nova] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Received event network-vif-deleted-e8b9a7d5-25ce-419f-b3e9-9179be86e340 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1121.450790] env[61978]: DEBUG nova.compute.manager [req-ad15ef65-a429-412c-b4ab-d5a4c8ae3ec4 req-4c2c4af9-1b80-4511-8edc-bfc6c460cead service nova] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Received event network-vif-plugged-ddce6e3c-0596-4fb1-81fc-7ad5823e1f15 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1121.450986] env[61978]: DEBUG oslo_concurrency.lockutils [req-ad15ef65-a429-412c-b4ab-d5a4c8ae3ec4 req-4c2c4af9-1b80-4511-8edc-bfc6c460cead service nova] Acquiring lock "de0f46af-870a-4095-a417-913a2c51f66b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.451527] env[61978]: DEBUG oslo_concurrency.lockutils [req-ad15ef65-a429-412c-b4ab-d5a4c8ae3ec4 req-4c2c4af9-1b80-4511-8edc-bfc6c460cead service nova] Lock "de0f46af-870a-4095-a417-913a2c51f66b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.451771] env[61978]: DEBUG oslo_concurrency.lockutils [req-ad15ef65-a429-412c-b4ab-d5a4c8ae3ec4 req-4c2c4af9-1b80-4511-8edc-bfc6c460cead service nova] Lock "de0f46af-870a-4095-a417-913a2c51f66b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.452051] env[61978]: DEBUG nova.compute.manager [req-ad15ef65-a429-412c-b4ab-d5a4c8ae3ec4 req-4c2c4af9-1b80-4511-8edc-bfc6c460cead service nova] [instance: de0f46af-870a-4095-a417-913a2c51f66b] No waiting events found dispatching network-vif-plugged-ddce6e3c-0596-4fb1-81fc-7ad5823e1f15 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1121.452278] env[61978]: WARNING nova.compute.manager [req-ad15ef65-a429-412c-b4ab-d5a4c8ae3ec4 req-4c2c4af9-1b80-4511-8edc-bfc6c460cead service nova] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Received unexpected event network-vif-plugged-ddce6e3c-0596-4fb1-81fc-7ad5823e1f15 for instance with vm_state building and task_state spawning. [ 1121.452727] env[61978]: DEBUG nova.compute.manager [req-ad15ef65-a429-412c-b4ab-d5a4c8ae3ec4 req-4c2c4af9-1b80-4511-8edc-bfc6c460cead service nova] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Received event network-changed-ddce6e3c-0596-4fb1-81fc-7ad5823e1f15 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1121.452727] env[61978]: DEBUG nova.compute.manager [req-ad15ef65-a429-412c-b4ab-d5a4c8ae3ec4 req-4c2c4af9-1b80-4511-8edc-bfc6c460cead service nova] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Refreshing instance network info cache due to event network-changed-ddce6e3c-0596-4fb1-81fc-7ad5823e1f15. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1121.453095] env[61978]: DEBUG oslo_concurrency.lockutils [req-ad15ef65-a429-412c-b4ab-d5a4c8ae3ec4 req-4c2c4af9-1b80-4511-8edc-bfc6c460cead service nova] Acquiring lock "refresh_cache-de0f46af-870a-4095-a417-913a2c51f66b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1121.473675] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 50788030-4dc2-4215-bf2c-acba5dd33ce4] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1121.497435] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395402, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.729286] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.828s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.729864] env[61978]: DEBUG nova.compute.manager [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1121.734783] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.803s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.735429] env[61978]: INFO nova.compute.claims [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1121.811042] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "c17c986e-c008-4414-8dd1-4ea836458048" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.811393] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "c17c986e-c008-4414-8dd1-4ea836458048" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.811631] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "c17c986e-c008-4414-8dd1-4ea836458048-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.811832] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "c17c986e-c008-4414-8dd1-4ea836458048-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.812019] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "c17c986e-c008-4414-8dd1-4ea836458048-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.816301] env[61978]: INFO nova.compute.manager [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Terminating instance [ 1121.820483] env[61978]: DEBUG nova.compute.manager [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 
tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1121.821364] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1121.821514] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f4437f4-b5a9-41e9-be18-1e2b8941a023 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.831792] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1121.832019] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33fd85e8-c787-4f3a-a199-9cac53c92487 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.845136] env[61978]: DEBUG oslo_vmware.api [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1121.845136] env[61978]: value = "task-1395403" [ 1121.845136] env[61978]: _type = "Task" [ 1121.845136] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.865239] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.865597] env[61978]: DEBUG oslo_vmware.api [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395403, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.895542] env[61978]: DEBUG nova.network.neutron [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1121.977721] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: bb0c149c-920e-47c4-a960-47b2fb443431] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1121.997327] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395402, 'name': ReconfigVM_Task, 'duration_secs': 0.772504} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.997327] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Reconfigured VM instance instance-00000046 to attach disk [datastore1] ae6b92bb-6f79-4b52-bdb7-095985bf2fad/ae6b92bb-6f79-4b52-bdb7-095985bf2fad.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1121.997571] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-86ba398a-e6a9-4f86-8734-c622e1ad59ff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.007404] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1122.007404] env[61978]: value = "task-1395404" [ 1122.007404] env[61978]: _type = "Task" [ 1122.007404] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.021377] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395404, 'name': Rename_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.129863] env[61978]: DEBUG nova.network.neutron [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Updating instance_info_cache with network_info: [{"id": "ddce6e3c-0596-4fb1-81fc-7ad5823e1f15", "address": "fa:16:3e:fe:1d:4b", "network": {"id": "c8b3d2ab-6847-4747-9638-70d0aefd63da", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1816910772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c31ffdd4e70d40ecbbb56777f9422a52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddce6e3c-05", "ovs_interfaceid": "ddce6e3c-0596-4fb1-81fc-7ad5823e1f15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.240132] env[61978]: DEBUG nova.compute.utils [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1122.247090] env[61978]: DEBUG nova.compute.manager [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1122.247775] env[61978]: DEBUG nova.network.neutron [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1122.300086] env[61978]: DEBUG nova.policy [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8f50bac42274555ab08e047cdb028ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43ebac7c44604f55b94cbc06648f4908', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1122.356934] env[61978]: DEBUG oslo_vmware.api [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395403, 'name': PowerOffVM_Task, 'duration_secs': 0.21999} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.357241] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1122.357419] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1122.357683] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3d494e0d-4683-46b8-98cc-dd6060a74a97 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.481309] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 76dff032-a806-4910-a48b-8850b05131c1] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1122.523046] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395404, 'name': Rename_Task, 'duration_secs': 0.179233} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.523317] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1122.523628] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5cb9984d-5d7f-4a3e-8232-6fb79cb26530 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.552567] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1122.552567] env[61978]: value = "task-1395406" [ 1122.552567] env[61978]: _type = "Task" [ 1122.552567] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.563750] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395406, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.602960] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1122.602960] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1122.602960] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Deleting the datastore file [datastore2] c17c986e-c008-4414-8dd1-4ea836458048 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1122.602960] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ecaa090-7c36-4e17-8d57-53a33e0ec8a5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.610209] env[61978]: DEBUG oslo_vmware.api [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1122.610209] env[61978]: value = "task-1395407" [ 1122.610209] env[61978]: _type = "Task" [ 1122.610209] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.622045] env[61978]: DEBUG oslo_vmware.api [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395407, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.636839] env[61978]: DEBUG oslo_concurrency.lockutils [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Releasing lock "refresh_cache-de0f46af-870a-4095-a417-913a2c51f66b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1122.637253] env[61978]: DEBUG nova.compute.manager [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Instance network_info: |[{"id": "ddce6e3c-0596-4fb1-81fc-7ad5823e1f15", "address": "fa:16:3e:fe:1d:4b", "network": {"id": "c8b3d2ab-6847-4747-9638-70d0aefd63da", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1816910772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c31ffdd4e70d40ecbbb56777f9422a52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddce6e3c-05", "ovs_interfaceid": "ddce6e3c-0596-4fb1-81fc-7ad5823e1f15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1122.637626] env[61978]: DEBUG oslo_concurrency.lockutils [req-ad15ef65-a429-412c-b4ab-d5a4c8ae3ec4 req-4c2c4af9-1b80-4511-8edc-bfc6c460cead service nova] Acquired lock "refresh_cache-de0f46af-870a-4095-a417-913a2c51f66b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.637829] env[61978]: DEBUG nova.network.neutron [req-ad15ef65-a429-412c-b4ab-d5a4c8ae3ec4 req-4c2c4af9-1b80-4511-8edc-bfc6c460cead service nova] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Refreshing network info cache for port ddce6e3c-0596-4fb1-81fc-7ad5823e1f15 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1122.639283] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:1d:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '411f389f-4e4f-4450-891e-38944cac6135', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'ddce6e3c-0596-4fb1-81fc-7ad5823e1f15', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1122.646630] env[61978]: DEBUG oslo.service.loopingcall [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1122.649856] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1122.649856] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4464c5f-5267-4681-96ed-82f96e516706 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.671599] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1122.671599] env[61978]: value = "task-1395408" [ 1122.671599] env[61978]: _type = "Task" [ 1122.671599] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.681589] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395408, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.757819] env[61978]: DEBUG nova.compute.manager [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1122.784322] env[61978]: DEBUG nova.network.neutron [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Successfully created port: 940a682a-ece9-493f-a4e0-56b30bf8bba7 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1122.985169] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 3ddf7322-5504-408f-af6c-af73fb1c4286] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1123.065264] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395406, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.120202] env[61978]: DEBUG oslo_vmware.api [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395407, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.261047} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.120202] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1123.120359] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1123.120553] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1123.120673] env[61978]: INFO nova.compute.manager [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Took 1.30 seconds to destroy the instance on the hypervisor. [ 1123.120895] env[61978]: DEBUG oslo.service.loopingcall [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1123.121111] env[61978]: DEBUG nova.compute.manager [-] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1123.121212] env[61978]: DEBUG nova.network.neutron [-] [instance: c17c986e-c008-4414-8dd1-4ea836458048] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1123.151586] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb99a3ae-4c30-469b-8f3d-9deaa9b9ec21 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.160026] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e84c5f3-4289-481e-b4e0-2adf7fade53d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.199631] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-233fb540-5cf6-4c9b-ad89-59636ca2c37e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.213602] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9964e0f-6c74-4ad3-98a6-d4b966ec3da3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.218700] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395408, 'name': CreateVM_Task, 'duration_secs': 0.538686} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.219107] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1123.220297] env[61978]: DEBUG oslo_concurrency.lockutils [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1123.220596] env[61978]: DEBUG oslo_concurrency.lockutils [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.221052] env[61978]: DEBUG oslo_concurrency.lockutils [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1123.221427] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1563b16d-2c44-4d2d-bd16-18dfcd7959f4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.233177] env[61978]: DEBUG nova.compute.provider_tree [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1123.242247] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1123.242247] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52867d25-9164-f5a5-0ef3-329c34297b01" [ 1123.242247] env[61978]: _type = "Task" [ 1123.242247] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.252182] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52867d25-9164-f5a5-0ef3-329c34297b01, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.340029] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "94665d8c-df88-4ad0-bb90-547ace2d6345" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.340029] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "94665d8c-df88-4ad0-bb90-547ace2d6345" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.489917] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: a4d45835-f065-445f-bcb6-d1b01d545cb0] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1123.565779] env[61978]: DEBUG oslo_vmware.api [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395406, 'name': PowerOnVM_Task, 'duration_secs': 0.567458} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.566112] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1123.566556] env[61978]: INFO nova.compute.manager [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Took 10.72 seconds to spawn the instance on the hypervisor. 
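[editor's aside] The PowerOnVM_Task entries above follow oslo.vmware's invoke-then-poll flow: the service invokes the asynchronous vSphere task ("Invoking VirtualMachine.PowerOnVM_Task with opID=..."), then wait_for_task repeatedly polls it (the "_poll_task ... progress is N%" lines) until it reports success. The following is a minimal, illustrative sketch of that flow only, not code taken from Nova or this log; the vCenter host, credentials, and the choice of the first VM found are placeholders.

    # Illustrative sketch of the invoke-then-poll pattern seen in the log above.
    # Assumes a reachable vCenter; host/credentials below are placeholders.
    from oslo_vmware import api
    from oslo_vmware import vim_util

    session = api.VMwareAPISession(
        'vc.example.test',            # placeholder vCenter host
        'user', 'password',           # placeholder credentials
        api_retry_count=10,
        task_poll_interval=0.5)       # matches the periodic progress polling above

    # Fetch some VirtualMachine managed-object references via the
    # PropertyCollector (the RetrievePropertiesEx calls in the log).
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 100)
    vm_ref = result.objects[0].obj    # first VM found; a real caller filters by UUID

    # Kick off the asynchronous power-on task and block until it finishes;
    # wait_for_task() polls the task state and raises if vCenter reports an error.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)

The same pattern covers the other task types in this section (ReconfigVM_Task, Rename_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, CreateVM_Task): only the method name and arguments passed to invoke_api change.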
[ 1123.566798] env[61978]: DEBUG nova.compute.manager [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1123.569088] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff2dae3-e81c-437a-9430-1f707a35a696 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.739402] env[61978]: DEBUG nova.scheduler.client.report [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1123.757097] env[61978]: DEBUG oslo_concurrency.lockutils [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1123.757388] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Processing image 2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1123.758022] env[61978]: DEBUG oslo_concurrency.lockutils [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a/2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1123.758022] env[61978]: DEBUG oslo_concurrency.lockutils [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a/2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.758022] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1123.758460] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9b337d4-14ca-47d1-9633-0158d5758169 {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.770208] env[61978]: DEBUG nova.compute.manager [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1123.778912] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1123.779150] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1123.780718] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50b0a127-1d51-4c00-9cf2-6d8514d45710 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.788063] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1123.788063] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5286e626-e5b6-f502-f44d-9fb691c645df" [ 1123.788063] env[61978]: _type = "Task" [ 1123.788063] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.799997] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5286e626-e5b6-f502-f44d-9fb691c645df, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.812970] env[61978]: DEBUG nova.virt.hardware [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1123.812970] env[61978]: DEBUG nova.virt.hardware [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1123.812970] env[61978]: DEBUG nova.virt.hardware [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1123.812970] env[61978]: DEBUG nova.virt.hardware [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1123.812970] env[61978]: DEBUG nova.virt.hardware [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1123.812970] env[61978]: DEBUG nova.virt.hardware [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1123.812970] env[61978]: DEBUG nova.virt.hardware [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1123.812970] env[61978]: DEBUG nova.virt.hardware [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1123.812970] env[61978]: DEBUG nova.virt.hardware [None 
req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1123.813872] env[61978]: DEBUG nova.virt.hardware [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1123.814012] env[61978]: DEBUG nova.virt.hardware [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1123.817016] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c061f09-ad51-445f-be64-b8036fc688b5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.824236] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8523e3c1-9861-4b6d-bbab-523d74e7e9c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.844160] env[61978]: DEBUG nova.compute.manager [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1123.851868] env[61978]: DEBUG nova.network.neutron [req-ad15ef65-a429-412c-b4ab-d5a4c8ae3ec4 req-4c2c4af9-1b80-4511-8edc-bfc6c460cead service nova] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Updated VIF entry in instance network info cache for port ddce6e3c-0596-4fb1-81fc-7ad5823e1f15. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1123.852226] env[61978]: DEBUG nova.network.neutron [req-ad15ef65-a429-412c-b4ab-d5a4c8ae3ec4 req-4c2c4af9-1b80-4511-8edc-bfc6c460cead service nova] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Updating instance_info_cache with network_info: [{"id": "ddce6e3c-0596-4fb1-81fc-7ad5823e1f15", "address": "fa:16:3e:fe:1d:4b", "network": {"id": "c8b3d2ab-6847-4747-9638-70d0aefd63da", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1816910772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c31ffdd4e70d40ecbbb56777f9422a52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddce6e3c-05", "ovs_interfaceid": "ddce6e3c-0596-4fb1-81fc-7ad5823e1f15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.996349] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 2f5b06f6-7178-4fdf-93b6-65477f020898] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1124.092339] env[61978]: INFO nova.compute.manager [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Took 35.87 seconds to build instance. [ 1124.252160] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.516s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.252160] env[61978]: DEBUG nova.compute.manager [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1124.253834] env[61978]: DEBUG oslo_concurrency.lockutils [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.479s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1124.254242] env[61978]: DEBUG nova.objects.instance [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lazy-loading 'resources' on Instance uuid b932d221-aca9-4853-aa9c-2d27981e878c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1124.307266] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Preparing fetch location {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1124.308051] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Fetch image to [datastore2] OSTACK_IMG_3382b68a-d58a-4e64-bf0d-7c8342dcd183/OSTACK_IMG_3382b68a-d58a-4e64-bf0d-7c8342dcd183.vmdk {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1124.308051] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Downloading stream optimized image 2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a to [datastore2] OSTACK_IMG_3382b68a-d58a-4e64-bf0d-7c8342dcd183/OSTACK_IMG_3382b68a-d58a-4e64-bf0d-7c8342dcd183.vmdk on the data store datastore2 as vApp {{(pid=61978) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1124.308361] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Downloading image file data 2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a to the ESX as VM named 'OSTACK_IMG_3382b68a-d58a-4e64-bf0d-7c8342dcd183' {{(pid=61978) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1124.383259] env[61978]: DEBUG oslo_concurrency.lockutils [req-ad15ef65-a429-412c-b4ab-d5a4c8ae3ec4 req-4c2c4af9-1b80-4511-8edc-bfc6c460cead service nova] Releasing lock "refresh_cache-de0f46af-870a-4095-a417-913a2c51f66b" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1124.390268] env[61978]: DEBUG nova.compute.manager [req-5d8c73e5-dd51-448d-b950-2c8446324555 req-6513bb6c-3582-436a-8ec2-722efca1cdd5 service nova] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Received event network-vif-deleted-394a8251-684b-4ddc-ae5c-7ef7ec06b503 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1124.390610] env[61978]: INFO nova.compute.manager [req-5d8c73e5-dd51-448d-b950-2c8446324555 req-6513bb6c-3582-436a-8ec2-722efca1cdd5 service nova] [instance: 
c17c986e-c008-4414-8dd1-4ea836458048] Neutron deleted interface 394a8251-684b-4ddc-ae5c-7ef7ec06b503; detaching it from the instance and deleting it from the info cache [ 1124.390899] env[61978]: DEBUG nova.network.neutron [req-5d8c73e5-dd51-448d-b950-2c8446324555 req-6513bb6c-3582-436a-8ec2-722efca1cdd5 service nova] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.430042] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.454238] env[61978]: DEBUG oslo_vmware.rw_handles [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1124.454238] env[61978]: value = "resgroup-9" [ 1124.454238] env[61978]: _type = "ResourcePool" [ 1124.454238] env[61978]: }. {{(pid=61978) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1124.454238] env[61978]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-a70239e9-cd61-46e2-a710-c4341cf70df9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.495925] env[61978]: DEBUG oslo_vmware.rw_handles [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lease: (returnval){ [ 1124.495925] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52df0cf8-c068-4854-b219-93d9de90f71d" [ 1124.495925] env[61978]: _type = "HttpNfcLease" [ 1124.495925] env[61978]: } obtained for vApp import into resource pool (val){ [ 1124.495925] env[61978]: value = "resgroup-9" [ 1124.495925] env[61978]: _type = "ResourcePool" [ 1124.495925] env[61978]: }. {{(pid=61978) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1124.497552] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the lease: (returnval){ [ 1124.497552] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52df0cf8-c068-4854-b219-93d9de90f71d" [ 1124.497552] env[61978]: _type = "HttpNfcLease" [ 1124.497552] env[61978]: } to be ready. {{(pid=61978) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1124.504288] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: d2614f71-3026-41d4-ae04-eaede9b5ead5] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1124.512102] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1124.512102] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52df0cf8-c068-4854-b219-93d9de90f71d" [ 1124.512102] env[61978]: _type = "HttpNfcLease" [ 1124.512102] env[61978]: } is initializing. 
{{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1124.593885] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e9338479-959e-46b1-ad07-3339f7684262 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.388s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.762621] env[61978]: DEBUG nova.compute.utils [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1124.769479] env[61978]: DEBUG nova.compute.manager [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1124.772294] env[61978]: DEBUG nova.network.neutron [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1124.783776] env[61978]: DEBUG nova.network.neutron [-] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.875752] env[61978]: DEBUG nova.policy [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1513c7acb97e4b208c73ccde70309ad7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '198eab494c0a4e0eb83bae5732df9c78', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1124.894235] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2cb695af-1672-42d0-b668-2438bfc8ab68 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.918741] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-601740fa-72e5-48c4-a5a5-2058c29f438e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.960527] env[61978]: DEBUG nova.compute.manager [req-5d8c73e5-dd51-448d-b950-2c8446324555 req-6513bb6c-3582-436a-8ec2-722efca1cdd5 service nova] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Detach interface failed, port_id=394a8251-684b-4ddc-ae5c-7ef7ec06b503, reason: Instance c17c986e-c008-4414-8dd1-4ea836458048 could not be found. 
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1125.008154] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 081339d7-6d9b-4b66-a816-467d23196c9a] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1125.012842] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1125.012842] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52df0cf8-c068-4854-b219-93d9de90f71d" [ 1125.012842] env[61978]: _type = "HttpNfcLease" [ 1125.012842] env[61978]: } is initializing. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1125.222210] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e5e9c9e-00b9-42eb-bf41-9bf025b3ab93 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.235217] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af1528f1-acb9-4989-ac29-7802e2abb857 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.271942] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f7e5d3-5a2a-4a64-8095-311efea92c4b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.275251] env[61978]: DEBUG nova.compute.manager [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1125.285458] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ab12be-9056-40f1-8772-714bdc2cf553 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.291628] env[61978]: INFO nova.compute.manager [-] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Took 2.17 seconds to deallocate network for instance. [ 1125.306508] env[61978]: DEBUG nova.compute.provider_tree [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1125.510029] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1125.510029] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52df0cf8-c068-4854-b219-93d9de90f71d" [ 1125.510029] env[61978]: _type = "HttpNfcLease" [ 1125.510029] env[61978]: } is ready. 
{{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1125.510200] env[61978]: DEBUG oslo_vmware.rw_handles [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1125.510200] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52df0cf8-c068-4854-b219-93d9de90f71d" [ 1125.510200] env[61978]: _type = "HttpNfcLease" [ 1125.510200] env[61978]: }. {{(pid=61978) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1125.511020] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6813525f-4a85-4b81-94f2-413eccde492d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.521221] env[61978]: DEBUG oslo_vmware.rw_handles [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5214172f-98f8-b737-cfdb-7401d3528070/disk-0.vmdk from lease info. {{(pid=61978) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1125.521221] env[61978]: DEBUG oslo_vmware.rw_handles [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5214172f-98f8-b737-cfdb-7401d3528070/disk-0.vmdk. {{(pid=61978) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1125.597046] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: a0762952-2afd-448a-8e46-ba788a4ca131] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1125.607933] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d78fafc1-8285-413f-8dbc-f009906294fc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.657914] env[61978]: DEBUG nova.network.neutron [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Successfully created port: 86b0db93-0668-4d3e-9bc5-6220369d7160 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1125.812235] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1125.814253] env[61978]: DEBUG nova.scheduler.client.report [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1125.827169] env[61978]: DEBUG nova.network.neutron [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Successfully updated port: 940a682a-ece9-493f-a4e0-56b30bf8bba7 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1125.982405] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "9ee04ee8-98ec-4be9-935d-cad7cd176466" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1125.982892] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "9ee04ee8-98ec-4be9-935d-cad7cd176466" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.104702] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 371ddf66-a39b-41c4-bbd1-2a1c1b99834e] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1126.287278] env[61978]: DEBUG nova.compute.manager [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1126.320563] env[61978]: DEBUG nova.virt.hardware [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1126.320825] env[61978]: DEBUG nova.virt.hardware [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1126.320989] env[61978]: DEBUG nova.virt.hardware [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1126.321205] env[61978]: DEBUG nova.virt.hardware [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1126.321361] env[61978]: DEBUG nova.virt.hardware [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1126.324700] env[61978]: DEBUG nova.virt.hardware [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1126.324700] env[61978]: DEBUG nova.virt.hardware [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1126.324700] env[61978]: DEBUG nova.virt.hardware [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1126.324700] env[61978]: DEBUG 
nova.virt.hardware [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1126.324700] env[61978]: DEBUG nova.virt.hardware [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1126.324700] env[61978]: DEBUG nova.virt.hardware [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1126.324700] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c281013-ec11-41d8-ae00-4e916c0dbea5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.335224] env[61978]: DEBUG oslo_concurrency.lockutils [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.079s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.337430] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "refresh_cache-1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1126.337497] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "refresh_cache-1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.337656] env[61978]: DEBUG nova.network.neutron [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1126.341530] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.441s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.343598] env[61978]: INFO nova.compute.claims [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1126.353035] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cecbde2c-1e00-464d-91dc-1f83ad7a15cd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.376335] env[61978]: INFO nova.scheduler.client.report [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Deleted allocations for instance b932d221-aca9-4853-aa9c-2d27981e878c [ 1126.488753] env[61978]: DEBUG nova.compute.utils [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1126.491335] env[61978]: DEBUG oslo_vmware.rw_handles [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Completed reading data from the image iterator. {{(pid=61978) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1126.491648] env[61978]: DEBUG oslo_vmware.rw_handles [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5214172f-98f8-b737-cfdb-7401d3528070/disk-0.vmdk. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1126.492776] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f0d773-f722-45d4-9345-f5c204cf8211 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.502529] env[61978]: DEBUG oslo_vmware.rw_handles [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5214172f-98f8-b737-cfdb-7401d3528070/disk-0.vmdk is in state: ready. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1126.502773] env[61978]: DEBUG oslo_vmware.rw_handles [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5214172f-98f8-b737-cfdb-7401d3528070/disk-0.vmdk. 
{{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1126.503683] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-f04ea33b-89f3-4afd-8fff-f2d4578cbbc4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.576356] env[61978]: DEBUG nova.compute.manager [req-35d443fd-e1d7-42b0-b9bd-4bb91f3366b9 req-d924cef1-66d0-4ac2-9f32-5c4bda73cf86 service nova] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Received event network-changed-28538b34-2ffa-4e6e-a451-0654e6ec063d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1126.576617] env[61978]: DEBUG nova.compute.manager [req-35d443fd-e1d7-42b0-b9bd-4bb91f3366b9 req-d924cef1-66d0-4ac2-9f32-5c4bda73cf86 service nova] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Refreshing instance network info cache due to event network-changed-28538b34-2ffa-4e6e-a451-0654e6ec063d. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1126.577786] env[61978]: DEBUG oslo_concurrency.lockutils [req-35d443fd-e1d7-42b0-b9bd-4bb91f3366b9 req-d924cef1-66d0-4ac2-9f32-5c4bda73cf86 service nova] Acquiring lock "refresh_cache-ae6b92bb-6f79-4b52-bdb7-095985bf2fad" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1126.577786] env[61978]: DEBUG oslo_concurrency.lockutils [req-35d443fd-e1d7-42b0-b9bd-4bb91f3366b9 req-d924cef1-66d0-4ac2-9f32-5c4bda73cf86 service nova] Acquired lock "refresh_cache-ae6b92bb-6f79-4b52-bdb7-095985bf2fad" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.577786] env[61978]: DEBUG nova.network.neutron [req-35d443fd-e1d7-42b0-b9bd-4bb91f3366b9 req-d924cef1-66d0-4ac2-9f32-5c4bda73cf86 service nova] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Refreshing network info cache for port 28538b34-2ffa-4e6e-a451-0654e6ec063d {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1126.610324] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: eb7cb200-c162-4e92-8916-6d9abd5cf34d] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1126.856346] env[61978]: DEBUG nova.compute.manager [req-f51caf1d-6a44-4b54-b6f2-4d2cc3522afe req-03f06723-1536-4126-9952-554610170535 service nova] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Received event network-vif-plugged-940a682a-ece9-493f-a4e0-56b30bf8bba7 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1126.856346] env[61978]: DEBUG oslo_concurrency.lockutils [req-f51caf1d-6a44-4b54-b6f2-4d2cc3522afe req-03f06723-1536-4126-9952-554610170535 service nova] Acquiring lock "1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.856629] env[61978]: DEBUG oslo_concurrency.lockutils [req-f51caf1d-6a44-4b54-b6f2-4d2cc3522afe req-03f06723-1536-4126-9952-554610170535 service nova] Lock "1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.856776] env[61978]: DEBUG oslo_concurrency.lockutils [req-f51caf1d-6a44-4b54-b6f2-4d2cc3522afe req-03f06723-1536-4126-9952-554610170535 service nova] Lock "1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.857027] env[61978]: DEBUG nova.compute.manager [req-f51caf1d-6a44-4b54-b6f2-4d2cc3522afe req-03f06723-1536-4126-9952-554610170535 service nova] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] No waiting events found dispatching network-vif-plugged-940a682a-ece9-493f-a4e0-56b30bf8bba7 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1126.857141] env[61978]: WARNING nova.compute.manager [req-f51caf1d-6a44-4b54-b6f2-4d2cc3522afe req-03f06723-1536-4126-9952-554610170535 service nova] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Received unexpected event network-vif-plugged-940a682a-ece9-493f-a4e0-56b30bf8bba7 for instance with vm_state building and task_state spawning. [ 1126.859431] env[61978]: DEBUG nova.compute.manager [req-f51caf1d-6a44-4b54-b6f2-4d2cc3522afe req-03f06723-1536-4126-9952-554610170535 service nova] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Received event network-changed-940a682a-ece9-493f-a4e0-56b30bf8bba7 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1126.859431] env[61978]: DEBUG nova.compute.manager [req-f51caf1d-6a44-4b54-b6f2-4d2cc3522afe req-03f06723-1536-4126-9952-554610170535 service nova] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Refreshing instance network info cache due to event network-changed-940a682a-ece9-493f-a4e0-56b30bf8bba7. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1126.859431] env[61978]: DEBUG oslo_concurrency.lockutils [req-f51caf1d-6a44-4b54-b6f2-4d2cc3522afe req-03f06723-1536-4126-9952-554610170535 service nova] Acquiring lock "refresh_cache-1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1126.885946] env[61978]: DEBUG oslo_concurrency.lockutils [None req-abbde9bd-be20-46b1-8eea-4a760f864bff tempest-ServerShowV247Test-320240694 tempest-ServerShowV247Test-320240694-project-member] Lock "b932d221-aca9-4853-aa9c-2d27981e878c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.965s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.949523] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Acquiring lock "1eae10e8-58b1-435d-86fc-0674725ce6cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.949853] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Lock "1eae10e8-58b1-435d-86fc-0674725ce6cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.956993] env[61978]: DEBUG oslo_vmware.rw_handles [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5214172f-98f8-b737-cfdb-7401d3528070/disk-0.vmdk. {{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1126.957266] env[61978]: INFO nova.virt.vmwareapi.images [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Downloaded image file data 2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a [ 1126.959180] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b35b7c-8232-49ad-aad9-2fcaa7cf12e0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.978704] env[61978]: DEBUG nova.network.neutron [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1126.980745] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-185aee0d-9468-4ea7-9274-18d179e354c4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.000085] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "9ee04ee8-98ec-4be9-935d-cad7cd176466" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.016s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1127.019987] env[61978]: INFO nova.virt.vmwareapi.images [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] The imported VM was unregistered [ 1127.022472] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Caching image {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1127.022663] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Creating directory with path [datastore2] devstack-image-cache_base/2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1127.023305] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8ab0314e-7132-4eea-a08e-06951830c9a9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.039260] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Created directory with path [datastore2] devstack-image-cache_base/2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1127.039260] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_3382b68a-d58a-4e64-bf0d-7c8342dcd183/OSTACK_IMG_3382b68a-d58a-4e64-bf0d-7c8342dcd183.vmdk to [datastore2] devstack-image-cache_base/2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a/2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a.vmdk. 
{{(pid=61978) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1127.039260] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-22fb6998-dd91-4fa6-a2a5-5b39efe352a6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.053304] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1127.053304] env[61978]: value = "task-1395411" [ 1127.053304] env[61978]: _type = "Task" [ 1127.053304] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.067824] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395411, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.113628] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 2084a365-b662-4564-b899-ab4c4a63f2b9] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1127.452717] env[61978]: DEBUG nova.compute.manager [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1127.568271] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395411, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.597028] env[61978]: DEBUG nova.network.neutron [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Updating instance_info_cache with network_info: [{"id": "940a682a-ece9-493f-a4e0-56b30bf8bba7", "address": "fa:16:3e:b0:fa:50", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap940a682a-ec", "ovs_interfaceid": "940a682a-ece9-493f-a4e0-56b30bf8bba7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.621962] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 85fc5af8-454d-4042-841a-945b7e84eb6c] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1127.846151] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-106c6470-a3bb-4702-9f41-82529fbf0372 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.855024] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad0b125-d1c2-49e6-bccb-6ac9ae64993f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.896146] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cc555d4-02f6-477e-8895-6559bbe3f4dc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.905413] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd2f19c-b400-4ea5-a6b8-a84e2baadd1d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.924028] env[61978]: DEBUG nova.compute.provider_tree [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1127.982826] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 
tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.066305] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395411, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.103889] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "refresh_cache-1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1128.104818] env[61978]: DEBUG nova.compute.manager [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Instance network_info: |[{"id": "940a682a-ece9-493f-a4e0-56b30bf8bba7", "address": "fa:16:3e:b0:fa:50", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap940a682a-ec", "ovs_interfaceid": "940a682a-ece9-493f-a4e0-56b30bf8bba7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1128.105288] env[61978]: DEBUG oslo_concurrency.lockutils [req-f51caf1d-6a44-4b54-b6f2-4d2cc3522afe req-03f06723-1536-4126-9952-554610170535 service nova] Acquired lock "refresh_cache-1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.105416] env[61978]: DEBUG nova.network.neutron [req-f51caf1d-6a44-4b54-b6f2-4d2cc3522afe req-03f06723-1536-4126-9952-554610170535 service nova] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Refreshing network info cache for port 940a682a-ece9-493f-a4e0-56b30bf8bba7 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1128.107110] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:fa:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ed3ffc1d-9f86-4029-857e-6cd1d383edbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '940a682a-ece9-493f-a4e0-56b30bf8bba7', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1128.116427] env[61978]: DEBUG oslo.service.loopingcall [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1128.121217] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "9ee04ee8-98ec-4be9-935d-cad7cd176466" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.121217] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "9ee04ee8-98ec-4be9-935d-cad7cd176466" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.121217] env[61978]: INFO nova.compute.manager [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Attaching volume df5c8cba-6cf8-4d47-9b7b-37971eba01d7 to /dev/sdb [ 1128.122649] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1128.123397] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-77d5a0af-a627-4ca0-b4e8-9d9cd1806af6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.140916] env[61978]: DEBUG nova.network.neutron [req-35d443fd-e1d7-42b0-b9bd-4bb91f3366b9 req-d924cef1-66d0-4ac2-9f32-5c4bda73cf86 service nova] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Updated VIF entry in instance network info cache for port 28538b34-2ffa-4e6e-a451-0654e6ec063d. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1128.141407] env[61978]: DEBUG nova.network.neutron [req-35d443fd-e1d7-42b0-b9bd-4bb91f3366b9 req-d924cef1-66d0-4ac2-9f32-5c4bda73cf86 service nova] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Updating instance_info_cache with network_info: [{"id": "28538b34-2ffa-4e6e-a451-0654e6ec063d", "address": "fa:16:3e:67:e7:1c", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28538b34-2f", "ovs_interfaceid": "28538b34-2ffa-4e6e-a451-0654e6ec063d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.145626] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 5d48e854-45fd-4767-91b7-100f84bdca55] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1128.158442] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1128.158442] env[61978]: value = "task-1395412" [ 1128.158442] env[61978]: _type = "Task" [ 1128.158442] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.167765] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395412, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.173173] env[61978]: DEBUG nova.network.neutron [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Successfully updated port: 86b0db93-0668-4d3e-9bc5-6220369d7160 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1128.205980] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8fadff1-6394-4769-8853-778d0dddccfc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.217171] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce6b73c1-7ffb-4a5b-b2af-af4caebe6da0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.237404] env[61978]: DEBUG nova.virt.block_device [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Updating existing volume attachment record: a31f5f04-b49e-495e-b7e2-be3c9a887ea9 {{(pid=61978) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1128.412185] env[61978]: DEBUG nova.network.neutron [req-f51caf1d-6a44-4b54-b6f2-4d2cc3522afe req-03f06723-1536-4126-9952-554610170535 service nova] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Updated VIF entry in instance network info cache for port 940a682a-ece9-493f-a4e0-56b30bf8bba7. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1128.412593] env[61978]: DEBUG nova.network.neutron [req-f51caf1d-6a44-4b54-b6f2-4d2cc3522afe req-03f06723-1536-4126-9952-554610170535 service nova] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Updating instance_info_cache with network_info: [{"id": "940a682a-ece9-493f-a4e0-56b30bf8bba7", "address": "fa:16:3e:b0:fa:50", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap940a682a-ec", "ovs_interfaceid": "940a682a-ece9-493f-a4e0-56b30bf8bba7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.429111] env[61978]: DEBUG nova.scheduler.client.report [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 
tempest-ServerDiagnosticsV248Test-291789393-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1128.568055] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395411, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.648966] env[61978]: DEBUG oslo_concurrency.lockutils [req-35d443fd-e1d7-42b0-b9bd-4bb91f3366b9 req-d924cef1-66d0-4ac2-9f32-5c4bda73cf86 service nova] Releasing lock "refresh_cache-ae6b92bb-6f79-4b52-bdb7-095985bf2fad" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1128.649569] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 66ee1fd7-40f7-461f-b0c6-5951a58ac660] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1128.668375] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395412, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.676605] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "refresh_cache-35a6d3ec-8688-43c2-93c4-b23033aaf280" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1128.676809] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquired lock "refresh_cache-35a6d3ec-8688-43c2-93c4-b23033aaf280" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.677069] env[61978]: DEBUG nova.network.neutron [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1128.887496] env[61978]: DEBUG nova.compute.manager [req-700d33ef-ea85-42b7-b99f-8a09ad1bcf8b req-2b19c62c-5159-4c77-b0d7-74cf0584d13b service nova] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Received event network-vif-plugged-86b0db93-0668-4d3e-9bc5-6220369d7160 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1128.887496] env[61978]: DEBUG oslo_concurrency.lockutils [req-700d33ef-ea85-42b7-b99f-8a09ad1bcf8b req-2b19c62c-5159-4c77-b0d7-74cf0584d13b service nova] Acquiring lock "35a6d3ec-8688-43c2-93c4-b23033aaf280-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.887692] env[61978]: DEBUG oslo_concurrency.lockutils [req-700d33ef-ea85-42b7-b99f-8a09ad1bcf8b req-2b19c62c-5159-4c77-b0d7-74cf0584d13b service nova] Lock "35a6d3ec-8688-43c2-93c4-b23033aaf280-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.887841] env[61978]: DEBUG oslo_concurrency.lockutils [req-700d33ef-ea85-42b7-b99f-8a09ad1bcf8b req-2b19c62c-5159-4c77-b0d7-74cf0584d13b service nova] Lock "35a6d3ec-8688-43c2-93c4-b23033aaf280-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.888076] env[61978]: DEBUG nova.compute.manager [req-700d33ef-ea85-42b7-b99f-8a09ad1bcf8b req-2b19c62c-5159-4c77-b0d7-74cf0584d13b service nova] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] No waiting events found dispatching network-vif-plugged-86b0db93-0668-4d3e-9bc5-6220369d7160 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1128.888293] env[61978]: WARNING nova.compute.manager [req-700d33ef-ea85-42b7-b99f-8a09ad1bcf8b req-2b19c62c-5159-4c77-b0d7-74cf0584d13b service nova] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Received unexpected event network-vif-plugged-86b0db93-0668-4d3e-9bc5-6220369d7160 for instance with vm_state building and task_state spawning. [ 1128.888469] env[61978]: DEBUG nova.compute.manager [req-700d33ef-ea85-42b7-b99f-8a09ad1bcf8b req-2b19c62c-5159-4c77-b0d7-74cf0584d13b service nova] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Received event network-changed-86b0db93-0668-4d3e-9bc5-6220369d7160 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1128.888630] env[61978]: DEBUG nova.compute.manager [req-700d33ef-ea85-42b7-b99f-8a09ad1bcf8b req-2b19c62c-5159-4c77-b0d7-74cf0584d13b service nova] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Refreshing instance network info cache due to event network-changed-86b0db93-0668-4d3e-9bc5-6220369d7160. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1128.888811] env[61978]: DEBUG oslo_concurrency.lockutils [req-700d33ef-ea85-42b7-b99f-8a09ad1bcf8b req-2b19c62c-5159-4c77-b0d7-74cf0584d13b service nova] Acquiring lock "refresh_cache-35a6d3ec-8688-43c2-93c4-b23033aaf280" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1128.915452] env[61978]: DEBUG oslo_concurrency.lockutils [req-f51caf1d-6a44-4b54-b6f2-4d2cc3522afe req-03f06723-1536-4126-9952-554610170535 service nova] Releasing lock "refresh_cache-1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1128.934623] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.593s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.934937] env[61978]: DEBUG nova.compute.manager [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1128.937819] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.996s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.938141] env[61978]: DEBUG nova.objects.instance [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lazy-loading 'resources' on Instance uuid c861eaa2-1c57-476f-92b3-886c8e44f6b4 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1129.027471] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquiring lock "4c7053ee-7c44-49ee-8d30-bf14686c6b1c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.028058] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Lock "4c7053ee-7c44-49ee-8d30-bf14686c6b1c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1129.028187] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquiring lock 
"4c7053ee-7c44-49ee-8d30-bf14686c6b1c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.028395] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Lock "4c7053ee-7c44-49ee-8d30-bf14686c6b1c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1129.028649] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Lock "4c7053ee-7c44-49ee-8d30-bf14686c6b1c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1129.031265] env[61978]: INFO nova.compute.manager [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Terminating instance [ 1129.036663] env[61978]: DEBUG nova.compute.manager [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1129.036799] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1129.037910] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25a1f68-5526-4994-b143-cb414868e50a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.047854] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1129.048213] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-83e4a8ac-511a-4b3f-a6be-1f48d5ad5aec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.056163] env[61978]: DEBUG oslo_vmware.api [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 1129.056163] env[61978]: value = "task-1395416" [ 1129.056163] env[61978]: _type = "Task" [ 1129.056163] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.069989] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395411, 'name': MoveVirtualDisk_Task} progress is 80%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.073740] env[61978]: DEBUG oslo_vmware.api [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1395416, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.153799] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: e9e2deb5-5bf9-4b57-832f-9928d3cda162] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1129.173800] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395412, 'name': CreateVM_Task, 'duration_secs': 0.985452} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.174060] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1129.174716] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1129.175424] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.175424] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1129.175573] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3a62965-b70c-4fc7-ab5c-c70a0bef7dc8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.185742] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1129.185742] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ea978a-68d0-a110-91e2-416b5d53b387" [ 1129.185742] env[61978]: _type = "Task" [ 1129.185742] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.198738] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ea978a-68d0-a110-91e2-416b5d53b387, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.216654] env[61978]: DEBUG nova.network.neutron [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1129.227721] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Acquiring lock "7d388d5c-2120-4dc5-a04f-5394e1e6f852" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.228049] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Lock "7d388d5c-2120-4dc5-a04f-5394e1e6f852" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1129.393373] env[61978]: DEBUG nova.network.neutron [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Updating instance_info_cache with network_info: [{"id": "86b0db93-0668-4d3e-9bc5-6220369d7160", "address": "fa:16:3e:f8:46:00", "network": {"id": "aa63ec3f-fde3-49f2-ab12-71ae85601428", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-445619089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "198eab494c0a4e0eb83bae5732df9c78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "650f7968-4522-4ba5-8304-1b9949951ed7", "external-id": "nsx-vlan-transportzone-568", "segmentation_id": 568, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86b0db93-06", "ovs_interfaceid": "86b0db93-0668-4d3e-9bc5-6220369d7160", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.441381] env[61978]: DEBUG nova.compute.utils [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1129.445859] env[61978]: DEBUG nova.compute.manager [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Not allocating networking since 'none' was specified. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1129.574533] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395411, 'name': MoveVirtualDisk_Task} progress is 100%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.580738] env[61978]: DEBUG oslo_vmware.api [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1395416, 'name': PowerOffVM_Task, 'duration_secs': 0.363219} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.581434] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1129.581642] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1129.581923] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1064188f-d055-4060-afd3-f42f9c7483c8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.659608] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: e249c706-3196-4593-ae96-53f2619e0243] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1129.663498] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1129.663737] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1129.663950] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Deleting the datastore file [datastore1] 4c7053ee-7c44-49ee-8d30-bf14686c6b1c {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1129.664534] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ea502950-e222-4174-8643-7b9799051be3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.673119] env[61978]: DEBUG oslo_vmware.api [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for the task: (returnval){ [ 
1129.673119] env[61978]: value = "task-1395418" [ 1129.673119] env[61978]: _type = "Task" [ 1129.673119] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.682310] env[61978]: DEBUG oslo_vmware.api [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1395418, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.701042] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ea978a-68d0-a110-91e2-416b5d53b387, 'name': SearchDatastore_Task, 'duration_secs': 0.096664} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.701452] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1129.701739] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1129.702031] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1129.702226] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.702444] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1129.702773] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f54be17-aad4-45fa-bc1c-835bdb39087b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.723063] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 
tempest-DeleteServersTestJSON-992411880-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1129.723304] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1129.724313] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f63f92ec-ac56-4f18-a81f-95d7842f51a0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.731770] env[61978]: DEBUG nova.compute.manager [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1129.735356] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1129.735356] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ad729b-3e26-38f2-720c-7032e4030aa1" [ 1129.735356] env[61978]: _type = "Task" [ 1129.735356] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.751323] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ad729b-3e26-38f2-720c-7032e4030aa1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.805077] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deed8592-b55a-4ea3-b5fb-6d4419b71271 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.813605] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f83eeba5-d509-4e68-a304-8f06d75ca5b6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.844202] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39deb0c7-8253-4564-b6ec-c8094139c0b4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.851811] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0232bc2-1aa0-4d0f-b2e2-2b5ef3e691ee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.864946] env[61978]: DEBUG nova.compute.provider_tree [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1129.896280] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Releasing lock "refresh_cache-35a6d3ec-8688-43c2-93c4-b23033aaf280" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1129.896923] env[61978]: DEBUG nova.compute.manager [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Instance network_info: |[{"id": "86b0db93-0668-4d3e-9bc5-6220369d7160", "address": "fa:16:3e:f8:46:00", "network": {"id": "aa63ec3f-fde3-49f2-ab12-71ae85601428", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-445619089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "198eab494c0a4e0eb83bae5732df9c78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "650f7968-4522-4ba5-8304-1b9949951ed7", "external-id": "nsx-vlan-transportzone-568", "segmentation_id": 568, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86b0db93-06", "ovs_interfaceid": "86b0db93-0668-4d3e-9bc5-6220369d7160", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1129.896923] env[61978]: DEBUG oslo_concurrency.lockutils 
[req-700d33ef-ea85-42b7-b99f-8a09ad1bcf8b req-2b19c62c-5159-4c77-b0d7-74cf0584d13b service nova] Acquired lock "refresh_cache-35a6d3ec-8688-43c2-93c4-b23033aaf280" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.897218] env[61978]: DEBUG nova.network.neutron [req-700d33ef-ea85-42b7-b99f-8a09ad1bcf8b req-2b19c62c-5159-4c77-b0d7-74cf0584d13b service nova] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Refreshing network info cache for port 86b0db93-0668-4d3e-9bc5-6220369d7160 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1129.898301] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:46:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '650f7968-4522-4ba5-8304-1b9949951ed7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '86b0db93-0668-4d3e-9bc5-6220369d7160', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1129.906033] env[61978]: DEBUG oslo.service.loopingcall [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1129.908590] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1129.909097] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6784254-50c0-4fee-95e1-efab90974c0b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.932337] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1129.932337] env[61978]: value = "task-1395419" [ 1129.932337] env[61978]: _type = "Task" [ 1129.932337] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.946795] env[61978]: DEBUG nova.compute.manager [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1129.949386] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395419, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.066948] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395411, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.600018} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.067263] env[61978]: INFO nova.virt.vmwareapi.ds_util [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_3382b68a-d58a-4e64-bf0d-7c8342dcd183/OSTACK_IMG_3382b68a-d58a-4e64-bf0d-7c8342dcd183.vmdk to [datastore2] devstack-image-cache_base/2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a/2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a.vmdk. [ 1130.067457] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Cleaning up location [datastore2] OSTACK_IMG_3382b68a-d58a-4e64-bf0d-7c8342dcd183 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1130.067627] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_3382b68a-d58a-4e64-bf0d-7c8342dcd183 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1130.067882] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c6b4258-09d4-47d6-8bde-f3cd0072b3bc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.076455] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1130.076455] env[61978]: value = "task-1395420" [ 1130.076455] env[61978]: _type = "Task" [ 1130.076455] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.086543] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395420, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.155501] env[61978]: DEBUG nova.network.neutron [req-700d33ef-ea85-42b7-b99f-8a09ad1bcf8b req-2b19c62c-5159-4c77-b0d7-74cf0584d13b service nova] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Updated VIF entry in instance network info cache for port 86b0db93-0668-4d3e-9bc5-6220369d7160. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1130.155978] env[61978]: DEBUG nova.network.neutron [req-700d33ef-ea85-42b7-b99f-8a09ad1bcf8b req-2b19c62c-5159-4c77-b0d7-74cf0584d13b service nova] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Updating instance_info_cache with network_info: [{"id": "86b0db93-0668-4d3e-9bc5-6220369d7160", "address": "fa:16:3e:f8:46:00", "network": {"id": "aa63ec3f-fde3-49f2-ab12-71ae85601428", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-445619089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "198eab494c0a4e0eb83bae5732df9c78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "650f7968-4522-4ba5-8304-1b9949951ed7", "external-id": "nsx-vlan-transportzone-568", "segmentation_id": 568, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86b0db93-06", "ovs_interfaceid": "86b0db93-0668-4d3e-9bc5-6220369d7160", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.165598] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 892b03e7-a9fc-4b53-bffd-d8b090cbb9ed] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1130.187695] env[61978]: DEBUG oslo_vmware.api [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Task: {'id': task-1395418, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.21389} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.188088] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1130.188354] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1130.188788] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1130.188889] env[61978]: INFO nova.compute.manager [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1130.189234] env[61978]: DEBUG oslo.service.loopingcall [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1130.189590] env[61978]: DEBUG nova.compute.manager [-] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1130.189668] env[61978]: DEBUG nova.network.neutron [-] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1130.255920] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ad729b-3e26-38f2-720c-7032e4030aa1, 'name': SearchDatastore_Task, 'duration_secs': 0.022242} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.257248] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7201cc00-4564-43f0-ab79-4ad7fd1df472 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.263968] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.264335] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1130.264335] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]528c16f5-92a6-b286-729e-24f9b94c0b54" [ 1130.264335] env[61978]: _type = "Task" [ 1130.264335] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.272534] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528c16f5-92a6-b286-729e-24f9b94c0b54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.368380] env[61978]: DEBUG nova.scheduler.client.report [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1130.442907] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395419, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.588192] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395420, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.045601} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.588192] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1130.588192] env[61978]: DEBUG oslo_concurrency.lockutils [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a/2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1130.588192] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a/2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a.vmdk to [datastore2] de0f46af-870a-4095-a417-913a2c51f66b/de0f46af-870a-4095-a417-913a2c51f66b.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1130.588192] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d990f020-baed-40f3-bae8-55b0d0b30335 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.595439] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1130.595439] env[61978]: value = "task-1395421" [ 1130.595439] env[61978]: _type = "Task" [ 1130.595439] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.604415] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395421, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.659040] env[61978]: DEBUG oslo_concurrency.lockutils [req-700d33ef-ea85-42b7-b99f-8a09ad1bcf8b req-2b19c62c-5159-4c77-b0d7-74cf0584d13b service nova] Releasing lock "refresh_cache-35a6d3ec-8688-43c2-93c4-b23033aaf280" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1130.673171] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 96a38ed0-c880-4f21-9389-99f039279072] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1130.777041] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528c16f5-92a6-b286-729e-24f9b94c0b54, 'name': SearchDatastore_Task, 'duration_secs': 0.010422} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.777041] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1130.777041] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d/1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1130.777041] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f16077b3-44c5-4d48-bae1-d16fc5b5dd35 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.785442] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1130.785442] env[61978]: value = "task-1395423" [ 1130.785442] env[61978]: _type = "Task" [ 1130.785442] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.794358] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395423, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.873338] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.935s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.877056] env[61978]: DEBUG oslo_concurrency.lockutils [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.674s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.879014] env[61978]: INFO nova.compute.claims [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1130.905387] env[61978]: INFO nova.scheduler.client.report [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Deleted allocations for instance c861eaa2-1c57-476f-92b3-886c8e44f6b4 [ 1130.946454] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395419, 'name': CreateVM_Task, 'duration_secs': 0.804665} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.946692] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1130.947845] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1130.948139] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.948489] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1130.948788] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf7a26de-8356-4f53-9268-61f3b10fe438 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.956746] env[61978]: DEBUG 
oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1130.956746] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5238b662-cf0b-8b2e-8d36-f0e6524866d3" [ 1130.956746] env[61978]: _type = "Task" [ 1130.956746] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.961074] env[61978]: DEBUG nova.compute.manager [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1130.975481] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5238b662-cf0b-8b2e-8d36-f0e6524866d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.994239] env[61978]: DEBUG nova.virt.hardware [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1130.994402] env[61978]: DEBUG nova.virt.hardware [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1130.994531] env[61978]: DEBUG nova.virt.hardware [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1130.994722] env[61978]: DEBUG nova.virt.hardware [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1130.994908] env[61978]: DEBUG nova.virt.hardware [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] 
Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1130.995111] env[61978]: DEBUG nova.virt.hardware [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1130.995302] env[61978]: DEBUG nova.virt.hardware [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1130.995502] env[61978]: DEBUG nova.virt.hardware [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1130.995768] env[61978]: DEBUG nova.virt.hardware [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1130.995972] env[61978]: DEBUG nova.virt.hardware [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1130.996176] env[61978]: DEBUG nova.virt.hardware [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1130.997418] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6fc033-7533-4d72-abea-899d1eb6f6c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.009776] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb8ec83-3683-4b8a-9a5f-827f7d018b6b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.015913] env[61978]: DEBUG nova.compute.manager [req-c18b567e-717e-4ddc-8d6e-40d582debbe4 req-73ef34ed-52a5-49a1-93f4-294cda8deae2 service nova] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Received event network-vif-deleted-6324dacc-b741-4de5-8ded-34326888d25f {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1131.016445] env[61978]: INFO nova.compute.manager [req-c18b567e-717e-4ddc-8d6e-40d582debbe4 req-73ef34ed-52a5-49a1-93f4-294cda8deae2 service nova] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Neutron deleted interface 6324dacc-b741-4de5-8ded-34326888d25f; detaching it from the instance and deleting it from the info cache [ 1131.016640] env[61978]: DEBUG 
nova.network.neutron [req-c18b567e-717e-4ddc-8d6e-40d582debbe4 req-73ef34ed-52a5-49a1-93f4-294cda8deae2 service nova] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.029834] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Instance VIF info [] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1131.038916] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Creating folder: Project (ae39b841952341bd83c843f59cd7de5d). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1131.040177] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f84dedd-fb86-4383-af83-7f773b8f91ca {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.056606] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Created folder: Project (ae39b841952341bd83c843f59cd7de5d) in parent group-v295764. [ 1131.056606] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Creating folder: Instances. Parent ref: group-v295960. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1131.056606] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ca56992-700e-4175-b89d-71bb71555792 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.069799] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Created folder: Instances in parent group-v295960. [ 1131.070074] env[61978]: DEBUG oslo.service.loopingcall [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1131.070292] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1131.070896] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea368847-8031-49e2-8f1f-941baaac015f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.090425] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1131.090425] env[61978]: value = "task-1395426" [ 1131.090425] env[61978]: _type = "Task" [ 1131.090425] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.102045] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395426, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.113398] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395421, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.136034] env[61978]: DEBUG nova.network.neutron [-] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.178227] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 9b6b4da7-4f86-46bc-a75f-fc5e1126c53b] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1131.301437] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395423, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.421273] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0af8d2db-bf37-480e-a220-e8fa1e3c1f88 tempest-ServersTestMultiNic-747981156 tempest-ServersTestMultiNic-747981156-project-member] Lock "c861eaa2-1c57-476f-92b3-886c8e44f6b4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.625s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1131.476577] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5238b662-cf0b-8b2e-8d36-f0e6524866d3, 'name': SearchDatastore_Task, 'duration_secs': 0.087284} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.477050] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1131.477339] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1131.477854] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1131.477854] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.478110] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1131.478514] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-005b7164-1acf-4900-b428-bf0f1c33b945 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.504731] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1131.504961] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1131.505883] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d17a6f67-91b9-443f-9168-31cf4d678b15 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.516757] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1131.516757] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5235758f-08d6-505a-e5f8-01f3190edfc9" [ 1131.516757] env[61978]: _type = "Task" [ 1131.516757] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.520851] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-af431be5-6d43-4783-9353-1883ed1f3e56 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.534153] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e040b1b-f4d0-4212-a805-8a359a71d14e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.545134] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5235758f-08d6-505a-e5f8-01f3190edfc9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.575234] env[61978]: DEBUG nova.compute.manager [req-c18b567e-717e-4ddc-8d6e-40d582debbe4 req-73ef34ed-52a5-49a1-93f4-294cda8deae2 service nova] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Detach interface failed, port_id=6324dacc-b741-4de5-8ded-34326888d25f, reason: Instance 4c7053ee-7c44-49ee-8d30-bf14686c6b1c could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1131.605488] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395426, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.607912] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395421, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.639071] env[61978]: INFO nova.compute.manager [-] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Took 1.45 seconds to deallocate network for instance. 
[ 1131.682160] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.799056] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395423, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.031778] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5235758f-08d6-505a-e5f8-01f3190edfc9, 'name': SearchDatastore_Task, 'duration_secs': 0.094455} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.032576] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b33b6ad8-13dc-4e81-92f4-cfe6e4ed7353 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.039457] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1132.039457] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5254a0d6-8644-f0d6-b2fb-4fc39f4b49c8" [ 1132.039457] env[61978]: _type = "Task" [ 1132.039457] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.049147] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5254a0d6-8644-f0d6-b2fb-4fc39f4b49c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.105710] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395426, 'name': CreateVM_Task, 'duration_secs': 0.851469} completed successfully. 
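Entries such as "Running periodic task ComputeManager._cleanup_expired_console_auth_tokens" are emitted by oslo.service's periodic-task machinery. A minimal sketch of how such a task is declared (illustrative class and spacing, not Nova's actual ComputeManager):

    from oslo_service import periodic_task

    class ExampleManager(periodic_task.PeriodicTasks):
        """Illustrative manager; Nova's ComputeManager uses the same base class."""

        @periodic_task.periodic_task(spacing=60)
        def _cleanup_example(self, context):
            # Each run produces a "Running periodic task ..." DEBUG line
            # like the ones above.
            pass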
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.106414] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1132.107011] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1132.107213] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.107954] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1132.108279] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37f1899b-dcff-423b-ad17-1e49719985e4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.116897] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Waiting for the task: (returnval){ [ 1132.116897] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52aafb5b-b9d7-6077-32db-9b34e9e80555" [ 1132.116897] env[61978]: _type = "Task" [ 1132.116897] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.117252] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395421, 'name': CopyVirtualDisk_Task} progress is 60%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.135956] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52aafb5b-b9d7-6077-32db-9b34e9e80555, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.147456] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.234737] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fddf753-c91f-4f17-b527-1526531ddf8b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.250632] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e10ebac-4b1f-4473-87ac-c44b51e6b188 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.293402] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e958eb58-0fa7-4651-8bc0-8aa7915ca075 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.305356] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77539f6b-77fc-4683-8a5a-c4cc7a14e835 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.309572] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395423, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.325991] env[61978]: DEBUG nova.compute.provider_tree [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1132.553328] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5254a0d6-8644-f0d6-b2fb-4fc39f4b49c8, 'name': SearchDatastore_Task, 'duration_secs': 0.089067} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.553876] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1132.554269] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 35a6d3ec-8688-43c2-93c4-b23033aaf280/35a6d3ec-8688-43c2-93c4-b23033aaf280.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1132.554589] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-94711e31-141b-4f7f-a9d4-dcaa36cc179f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.567024] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1132.567024] env[61978]: value = "task-1395427" [ 1132.567024] env[61978]: _type = "Task" [ 1132.567024] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.580281] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395427, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.613653] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395421, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.634481] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52aafb5b-b9d7-6077-32db-9b34e9e80555, 'name': SearchDatastore_Task, 'duration_secs': 0.089843} completed successfully. 
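The "Copying Virtual Disk ... to ..." step issues a CopyVirtualDisk_Task against vCenter's VirtualDiskManager and then blocks on it. A rough sketch assuming oslo.vmware's VMwareAPISession invoke_api/wait_for_task interface; the helper itself is illustrative, not Nova's vm_util verbatim:

    def copy_virtual_disk(session, dc_ref, source_path, dest_path):
        """Copy a cached image vmdk to the instance directory (sketch)."""
        vim = session.vim
        task = session.invoke_api(
            vim, 'CopyVirtualDisk_Task',
            vim.service_content.virtualDiskManager,
            sourceName=source_path,     # e.g. "[datastore2] devstack-image-cache_base/<image>.vmdk"
            sourceDatacenter=dc_ref,
            destName=dest_path,         # e.g. "[datastore2] <instance-uuid>/<instance-uuid>.vmdk"
            destDatacenter=dc_ref)
        # Blocks while the CopyVirtualDisk_Task progress entries above go
        # from 0% to completion.
        session.wait_for_task(task)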
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.634927] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1132.635615] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1132.635615] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1132.635615] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.635869] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1132.636709] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a2b9e16-31c6-4bb7-b5f2-fca25654b747 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.659597] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1132.659791] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1132.660575] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc5744d0-4e8c-49dc-a0e1-dda9f0aa2dcd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.672404] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Waiting for the task: (returnval){ [ 1132.672404] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]520bc5a3-7bee-f7d3-11e6-38219a412ea0" [ 1132.672404] env[61978]: _type = "Task" [ 1132.672404] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.681480] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.681858] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.693024] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520bc5a3-7bee-f7d3-11e6-38219a412ea0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.802852] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395423, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.804102] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Volume attach. 
Driver type: vmdk {{(pid=61978) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1132.804339] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295958', 'volume_id': 'df5c8cba-6cf8-4d47-9b7b-37971eba01d7', 'name': 'volume-df5c8cba-6cf8-4d47-9b7b-37971eba01d7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9ee04ee8-98ec-4be9-935d-cad7cd176466', 'attached_at': '', 'detached_at': '', 'volume_id': 'df5c8cba-6cf8-4d47-9b7b-37971eba01d7', 'serial': 'df5c8cba-6cf8-4d47-9b7b-37971eba01d7'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1132.805202] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee1751f-660b-416d-91d5-fec90bce53a5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.829711] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7608326a-363c-4579-8a4e-8adede7dd910 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.833284] env[61978]: DEBUG nova.scheduler.client.report [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1132.868295] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] volume-df5c8cba-6cf8-4d47-9b7b-37971eba01d7/volume-df5c8cba-6cf8-4d47-9b7b-37971eba01d7.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1132.868295] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77cf9559-0288-44c2-9ad5-c49a6230b3a5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.890482] env[61978]: DEBUG oslo_vmware.api [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1132.890482] env[61978]: value = "task-1395428" [ 1132.890482] env[61978]: _type = "Task" [ 1132.890482] env[61978]: } to complete. 
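The _attach_volume_vmdk entry logs the connection_info that Cinder's vmdk driver hands to the compute driver. A trimmed illustration of that payload's shape, with values copied from the log; attach_volume_vmdk at the end is a hypothetical stand-in for the driver call:

    connection_info = {
        'driver_volume_type': 'vmdk',
        'data': {
            'volume': 'vm-295958',   # reference to the volume's backing VM in vCenter
            'volume_id': 'df5c8cba-6cf8-4d47-9b7b-37971eba01d7',
            'name': 'volume-df5c8cba-6cf8-4d47-9b7b-37971eba01d7',
            'profile_id': None,
            'qos_specs': None,
            'access_mode': 'rw',
            'encrypted': False,
            'cacheable': False,
        },
        'serial': 'df5c8cba-6cf8-4d47-9b7b-37971eba01d7',
    }

    # Hypothetical driver entry point: reconfigures the instance VM so the
    # volume's vmdk is added as an extra disk (the ReconfigVM_Task that follows).
    # attach_volume_vmdk(connection_info, instance='9ee04ee8-98ec-4be9-935d-cad7cd176466')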
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.904877] env[61978]: DEBUG oslo_vmware.api [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395428, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.081553] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395427, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.113647] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395421, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.191050] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520bc5a3-7bee-f7d3-11e6-38219a412ea0, 'name': SearchDatastore_Task, 'duration_secs': 0.092222} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.191362] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1133.191673] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1133.192833] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4240689b-461c-455d-8193-bf36b2a842da {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.204104] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Waiting for the task: (returnval){ [ 1133.204104] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f43251-d686-d62b-fda1-13f049789465" [ 1133.204104] env[61978]: _type = "Task" [ 1133.204104] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.215835] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f43251-d686-d62b-fda1-13f049789465, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.303612] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395423, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.338958] env[61978]: DEBUG oslo_concurrency.lockutils [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.463s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.339566] env[61978]: DEBUG nova.compute.manager [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1133.349491] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.483s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.349491] env[61978]: DEBUG nova.objects.instance [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lazy-loading 'resources' on Instance uuid cf6d8815-ed87-4629-9df9-6f406ac2fe6e {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1133.405964] env[61978]: DEBUG oslo_vmware.api [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395428, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.583656] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395427, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.618220] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395421, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.663976} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.618220] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a/2448edd8-9b0d-4c87-9ee4-c9ce6ffb100a.vmdk to [datastore2] de0f46af-870a-4095-a417-913a2c51f66b/de0f46af-870a-4095-a417-913a2c51f66b.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1133.618220] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-637b3c7e-dce7-478d-af3c-a87330c4496a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.649428] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] de0f46af-870a-4095-a417-913a2c51f66b/de0f46af-870a-4095-a417-913a2c51f66b.vmdk or device None with type streamOptimized {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1133.649816] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df70cd2b-6794-4332-a811-32ac00f13191 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.676209] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1133.676209] env[61978]: value = "task-1395429" [ 1133.676209] env[61978]: _type = "Task" [ 1133.676209] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.687955] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395429, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.716027] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f43251-d686-d62b-fda1-13f049789465, 'name': SearchDatastore_Task, 'duration_secs': 0.054501} completed successfully. 
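Each "Reconfiguring VM instance ... to attach disk ... with type thin/sparse/streamOptimized" line corresponds to a ReconfigVM_Task whose config spec adds a single virtual disk device. A condensed sketch of building that device-change spec with the session's SOAP client factory; the vSphere type names are the standard ones, but the helper, controller key, and unit number are illustrative rather than Nova's attach_disk_to_vm verbatim:

    def attach_disk_to_vm(session, vm_ref, vmdk_path, disk_type='thin'):
        """Attach an existing vmdk to a VM via ReconfigVM_Task (sketch)."""
        factory = session.vim.client.factory

        backing = factory.create('ns0:VirtualDiskFlatVer2BackingInfo')
        backing.fileName = vmdk_path          # "[datastore2] .../<name>.vmdk"
        backing.diskMode = 'persistent'
        backing.thinProvisioned = (disk_type == 'thin')

        disk = factory.create('ns0:VirtualDisk')
        disk.backing = backing
        disk.controllerKey = 1000             # illustrative SCSI controller key
        disk.unitNumber = 0
        disk.key = -100

        device_change = factory.create('ns0:VirtualDeviceConfigSpec')
        device_change.operation = 'add'
        device_change.device = disk

        config_spec = factory.create('ns0:VirtualMachineConfigSpec')
        config_spec.deviceChange = [device_change]

        task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                                  spec=config_spec)
        session.wait_for_task(task)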
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.716141] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1133.717847] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 38e4f039-20bc-4bed-b449-227bde070ed9/38e4f039-20bc-4bed-b449-227bde070ed9.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1133.717847] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e80dd81-12e1-4d65-ba6d-c8185ef32fb6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.735559] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Waiting for the task: (returnval){ [ 1133.735559] env[61978]: value = "task-1395430" [ 1133.735559] env[61978]: _type = "Task" [ 1133.735559] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.737045] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "refresh_cache-243e7146-46fc-43f4-a83b-cdc58f397f9e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1133.737203] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquired lock "refresh_cache-243e7146-46fc-43f4-a83b-cdc58f397f9e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.737518] env[61978]: DEBUG nova.network.neutron [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Forcefully refreshing network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1133.752126] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Task: {'id': task-1395430, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.801163] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395423, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.533749} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.801577] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d/1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1133.801940] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1133.802350] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f00e8fc-9b81-4d54-8fec-d2566323ec79 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.810476] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1133.810476] env[61978]: value = "task-1395431" [ 1133.810476] env[61978]: _type = "Task" [ 1133.810476] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.820438] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395431, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.851999] env[61978]: DEBUG nova.compute.utils [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1133.854331] env[61978]: DEBUG nova.compute.manager [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1133.854694] env[61978]: DEBUG nova.network.neutron [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1133.909105] env[61978]: DEBUG oslo_vmware.api [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395428, 'name': ReconfigVM_Task, 'duration_secs': 0.888444} completed successfully. 
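The "Extending root virtual disk to 1048576" value is the new capacity in KiB: a 1 GiB flavor root disk is 1 x 1024 x 1024 = 1,048,576 KiB, which matches ExtendVirtualDisk_Task's newCapacityKb parameter. A small sketch of that conversion and call (helper illustrative; the unit interpretation is inferred from the logged value):

    from oslo_utils import units

    def extend_root_disk(session, dc_ref, vmdk_path, root_gb):
        """Grow the instance's root vmdk to the flavor size (sketch)."""
        new_capacity_kb = root_gb * units.Mi   # 1 GiB flavor -> 1048576 KiB

        vim = session.vim
        task = session.invoke_api(
            vim, 'ExtendVirtualDisk_Task',
            vim.service_content.virtualDiskManager,
            name=vmdk_path,
            datacenter=dc_ref,
            newCapacityKb=new_capacity_kb,
            eagerZero=False)
        session.wait_for_task(task)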
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.912933] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Reconfigured VM instance instance-0000003a to attach disk [datastore2] volume-df5c8cba-6cf8-4d47-9b7b-37971eba01d7/volume-df5c8cba-6cf8-4d47-9b7b-37971eba01d7.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1133.923767] env[61978]: DEBUG nova.policy [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '65cc77a6782d42dd80d174df20fee70a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df79d3305e464a6b83f18497a2464140', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1133.926750] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74319c54-5d4b-42df-b6b4-c180746c81d0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.960957] env[61978]: DEBUG oslo_vmware.api [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1133.960957] env[61978]: value = "task-1395432" [ 1133.960957] env[61978]: _type = "Task" [ 1133.960957] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.985567] env[61978]: DEBUG oslo_vmware.api [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395432, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.085936] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395427, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.162994} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.086345] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 35a6d3ec-8688-43c2-93c4-b23033aaf280/35a6d3ec-8688-43c2-93c4-b23033aaf280.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1134.086643] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1134.086884] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a2353f39-a92d-457a-ab88-57a5681c86f0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.098620] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1134.098620] env[61978]: value = "task-1395433" [ 1134.098620] env[61978]: _type = "Task" [ 1134.098620] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.114376] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395433, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.191743] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395429, 'name': ReconfigVM_Task, 'duration_secs': 0.428143} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.192089] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Reconfigured VM instance instance-00000047 to attach disk [datastore2] de0f46af-870a-4095-a417-913a2c51f66b/de0f46af-870a-4095-a417-913a2c51f66b.vmdk or device None with type streamOptimized {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1134.192886] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e5256b3-e233-4347-8f0f-1d09e2e093c8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.200981] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1134.200981] env[61978]: value = "task-1395434" [ 1134.200981] env[61978]: _type = "Task" [ 1134.200981] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.210658] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395434, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.254656] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Task: {'id': task-1395430, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489286} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.255925] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 38e4f039-20bc-4bed-b449-227bde070ed9/38e4f039-20bc-4bed-b449-227bde070ed9.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1134.256352] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1134.257233] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe85dc0-b97a-42b9-8f84-bcace962be27 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.260355] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-28c1fd96-17f2-4901-b18e-6772f4c2d905 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.267988] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1557ca5d-5848-4091-a091-66b44d7a66ec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.272297] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Waiting for the task: (returnval){ [ 1134.272297] env[61978]: value = "task-1395435" [ 1134.272297] env[61978]: _type = "Task" [ 1134.272297] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.307988] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5504a9-8818-4dd6-8f5e-db8ba608eb48 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.314711] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Task: {'id': task-1395435, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.326808] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11288d51-c90b-476c-8462-1f76e6a18419 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.331181] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395431, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.128099} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.332384] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1134.333507] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f9ec1a-ced5-4b4c-a9d1-046ffa3f55d0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.344186] env[61978]: DEBUG nova.compute.provider_tree [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1134.346135] env[61978]: DEBUG nova.network.neutron [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Successfully created port: e0d58422-d319-4563-81b9-65c067c4b306 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1134.367658] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d/1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1134.368883] env[61978]: DEBUG nova.compute.manager [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1134.371430] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3daaedd8-fb65-4165-9217-a277a72e6275 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.394408] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1134.394408] env[61978]: value = "task-1395436" [ 1134.394408] env[61978]: _type = "Task" [ 1134.394408] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.403030] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395436, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.472563] env[61978]: DEBUG oslo_vmware.api [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395432, 'name': ReconfigVM_Task, 'duration_secs': 0.245406} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.472881] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295958', 'volume_id': 'df5c8cba-6cf8-4d47-9b7b-37971eba01d7', 'name': 'volume-df5c8cba-6cf8-4d47-9b7b-37971eba01d7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9ee04ee8-98ec-4be9-935d-cad7cd176466', 'attached_at': '', 'detached_at': '', 'volume_id': 'df5c8cba-6cf8-4d47-9b7b-37971eba01d7', 'serial': 'df5c8cba-6cf8-4d47-9b7b-37971eba01d7'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1134.609559] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395433, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.135674} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.609800] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1134.610589] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b51e7c3c-34ed-4b49-93fa-607cfcec1203 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.633199] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 35a6d3ec-8688-43c2-93c4-b23033aaf280/35a6d3ec-8688-43c2-93c4-b23033aaf280.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1134.633717] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7348746b-bc65-40d1-bf15-5f3e16613033 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.653769] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1134.653769] env[61978]: value = "task-1395437" [ 1134.653769] env[61978]: _type = "Task" [ 
1134.653769] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.663237] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395437, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.713455] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395434, 'name': Rename_Task, 'duration_secs': 0.353599} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.713455] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1134.713455] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c35f0b79-f466-42a7-bb39-1162955bac96 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.721026] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1134.721026] env[61978]: value = "task-1395438" [ 1134.721026] env[61978]: _type = "Task" [ 1134.721026] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.726979] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395438, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.782316] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Task: {'id': task-1395435, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.253507} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.782873] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1134.783839] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237f7913-84d3-4904-8b7e-52d4311070fd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.805042] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 38e4f039-20bc-4bed-b449-227bde070ed9/38e4f039-20bc-4bed-b449-227bde070ed9.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1134.807149] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac6b39de-b57b-4376-a340-9511354b4d21 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.829846] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Waiting for the task: (returnval){ [ 1134.829846] env[61978]: value = "task-1395439" [ 1134.829846] env[61978]: _type = "Task" [ 1134.829846] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.839190] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Task: {'id': task-1395439, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.849588] env[61978]: DEBUG nova.scheduler.client.report [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1134.907142] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395436, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.042494] env[61978]: DEBUG nova.network.neutron [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Updating instance_info_cache with network_info: [{"id": "2daa968c-ac9c-4f15-ad2b-7977f5581ef1", "address": "fa:16:3e:0c:ea:2e", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2daa968c-ac", "ovs_interfaceid": "2daa968c-ac9c-4f15-ad2b-7977f5581ef1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.165627] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395437, 'name': ReconfigVM_Task, 'duration_secs': 0.289209} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.165838] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 35a6d3ec-8688-43c2-93c4-b23033aaf280/35a6d3ec-8688-43c2-93c4-b23033aaf280.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1135.168180] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ee4715fe-389b-4cbe-8291-d2daf28429e9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.174794] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1135.174794] env[61978]: value = "task-1395440" [ 1135.174794] env[61978]: _type = "Task" [ 1135.174794] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.184651] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395440, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.229759] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395438, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.349203] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Task: {'id': task-1395439, 'name': ReconfigVM_Task, 'duration_secs': 0.480745} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.349697] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 38e4f039-20bc-4bed-b449-227bde070ed9/38e4f039-20bc-4bed-b449-227bde070ed9.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1135.350693] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b768139f-c3ac-463e-b725-3957ce97dcdc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.355677] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.008s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1135.359590] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.931s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1135.361399] env[61978]: INFO nova.compute.claims [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1135.364390] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Waiting for the task: (returnval){ [ 1135.364390] env[61978]: value = "task-1395441" [ 1135.364390] env[61978]: _type = "Task" [ 1135.364390] 
env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.374958] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Task: {'id': task-1395441, 'name': Rename_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.384178] env[61978]: INFO nova.scheduler.client.report [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Deleted allocations for instance cf6d8815-ed87-4629-9df9-6f406ac2fe6e [ 1135.393912] env[61978]: DEBUG nova.compute.manager [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1135.408364] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395436, 'name': ReconfigVM_Task, 'duration_secs': 0.608342} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.408746] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d/1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1135.409350] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-99ac1768-9ff1-4084-8d77-a60f982acf59 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.419554] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1135.419554] env[61978]: value = "task-1395442" [ 1135.419554] env[61978]: _type = "Task" [ 1135.419554] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.424872] env[61978]: DEBUG nova.virt.hardware [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1135.425642] env[61978]: DEBUG nova.virt.hardware [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1135.425881] env[61978]: DEBUG nova.virt.hardware [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1135.426196] env[61978]: DEBUG nova.virt.hardware [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1135.426303] env[61978]: DEBUG nova.virt.hardware [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1135.426457] env[61978]: DEBUG nova.virt.hardware [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1135.426837] env[61978]: DEBUG nova.virt.hardware [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1135.427037] env[61978]: DEBUG nova.virt.hardware [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1135.427309] env[61978]: DEBUG nova.virt.hardware [None 
req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1135.427621] env[61978]: DEBUG nova.virt.hardware [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1135.427689] env[61978]: DEBUG nova.virt.hardware [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1135.429203] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dbb7ca0-3d52-432d-a554-763ab41a0fea {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.439536] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395442, 'name': Rename_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.443540] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be1b9a78-00b3-40b0-adb9-4d999b50d11b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.521928] env[61978]: DEBUG nova.objects.instance [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lazy-loading 'flavor' on Instance uuid 9ee04ee8-98ec-4be9-935d-cad7cd176466 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1135.543407] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Releasing lock "refresh_cache-243e7146-46fc-43f4-a83b-cdc58f397f9e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1135.543683] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Updated the network info_cache for instance {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1135.543830] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1135.544350] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1135.544529] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task 
ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1135.544685] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1135.544831] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1135.544977] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._sync_power_states {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1135.685640] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395440, 'name': Rename_Task, 'duration_secs': 0.164992} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.685995] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1135.686269] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c65d8964-7ab5-4c10-a737-fe241e397759 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.693209] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1135.693209] env[61978]: value = "task-1395443" [ 1135.693209] env[61978]: _type = "Task" [ 1135.693209] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.702393] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395443, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.730055] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395438, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.880225] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Task: {'id': task-1395441, 'name': Rename_Task, 'duration_secs': 0.30652} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.881188] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1135.881846] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45f22f8e-602e-403e-8a13-29dc0da13114 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.892018] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9317d78d-1618-4ccb-85e7-9a4d0eb1b205 tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lock "cf6d8815-ed87-4629-9df9-6f406ac2fe6e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.256s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1135.894045] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Waiting for the task: (returnval){ [ 1135.894045] env[61978]: value = "task-1395444" [ 1135.894045] env[61978]: _type = "Task" [ 1135.894045] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.904254] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Task: {'id': task-1395444, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.929554] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395442, 'name': Rename_Task, 'duration_secs': 0.333981} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.929791] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1135.930068] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f02c66b-25b6-4d49-9495-6bfef6fb9c4a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.937319] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1135.937319] env[61978]: value = "task-1395445" [ 1135.937319] env[61978]: _type = "Task" [ 1135.937319] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.949065] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395445, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.995680] env[61978]: DEBUG nova.compute.manager [req-4f53a3a3-16de-4306-8272-2ccaa4126c9b req-0924b4e9-ebaa-4edf-9e82-7e6f5ba4fdb0 service nova] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Received event network-vif-plugged-e0d58422-d319-4563-81b9-65c067c4b306 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1135.995680] env[61978]: DEBUG oslo_concurrency.lockutils [req-4f53a3a3-16de-4306-8272-2ccaa4126c9b req-0924b4e9-ebaa-4edf-9e82-7e6f5ba4fdb0 service nova] Acquiring lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1135.995680] env[61978]: DEBUG oslo_concurrency.lockutils [req-4f53a3a3-16de-4306-8272-2ccaa4126c9b req-0924b4e9-ebaa-4edf-9e82-7e6f5ba4fdb0 service nova] Lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1135.996349] env[61978]: DEBUG oslo_concurrency.lockutils [req-4f53a3a3-16de-4306-8272-2ccaa4126c9b req-0924b4e9-ebaa-4edf-9e82-7e6f5ba4fdb0 service nova] Lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1135.996388] env[61978]: DEBUG nova.compute.manager [req-4f53a3a3-16de-4306-8272-2ccaa4126c9b req-0924b4e9-ebaa-4edf-9e82-7e6f5ba4fdb0 service nova] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] No waiting events found dispatching network-vif-plugged-e0d58422-d319-4563-81b9-65c067c4b306 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1136.001023] env[61978]: WARNING nova.compute.manager [req-4f53a3a3-16de-4306-8272-2ccaa4126c9b req-0924b4e9-ebaa-4edf-9e82-7e6f5ba4fdb0 service nova] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Received unexpected event network-vif-plugged-e0d58422-d319-4563-81b9-65c067c4b306 for instance with vm_state building and task_state spawning. 
[ 1136.028053] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e30fad7b-fc4f-4b70-b7d4-794045a14651 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "9ee04ee8-98ec-4be9-935d-cad7cd176466" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.907s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.052401] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Getting list of instances from cluster (obj){ [ 1136.052401] env[61978]: value = "domain-c8" [ 1136.052401] env[61978]: _type = "ClusterComputeResource" [ 1136.052401] env[61978]: } {{(pid=61978) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1136.053830] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-758a9e6a-b7ed-4263-b152-6293bc806ca3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.077916] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Got total of 14 instances {{(pid=61978) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1136.078114] env[61978]: WARNING nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] While synchronizing instance power states, found 18 instances in the database and 14 instances on the hypervisor. [ 1136.078283] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Triggering sync for uuid 243e7146-46fc-43f4-a83b-cdc58f397f9e {{(pid=61978) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1136.078477] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Triggering sync for uuid c17c986e-c008-4414-8dd1-4ea836458048 {{(pid=61978) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1136.078632] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Triggering sync for uuid 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0 {{(pid=61978) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1136.078785] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Triggering sync for uuid 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7 {{(pid=61978) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1136.078936] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Triggering sync for uuid bdfdd685-e440-4f53-b6c4-2ee2f06acba8 {{(pid=61978) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1136.079126] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Triggering sync for uuid 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c {{(pid=61978) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1136.079345] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Triggering sync for uuid 4c7053ee-7c44-49ee-8d30-bf14686c6b1c {{(pid=61978) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1136.079517] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Triggering sync for uuid 9ee04ee8-98ec-4be9-935d-cad7cd176466 {{(pid=61978) 
_sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1136.079652] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Triggering sync for uuid 0cdff646-34ad-49d5-b775-28e8e7ce778e {{(pid=61978) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1136.079802] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Triggering sync for uuid a1087abd-28d1-40ac-96ab-dc38392d027c {{(pid=61978) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1136.080063] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Triggering sync for uuid 17c56c1c-9992-4559-ad23-c68909ae6792 {{(pid=61978) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1136.080210] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Triggering sync for uuid ae6b92bb-6f79-4b52-bdb7-095985bf2fad {{(pid=61978) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1136.080284] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Triggering sync for uuid de0f46af-870a-4095-a417-913a2c51f66b {{(pid=61978) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1136.080406] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Triggering sync for uuid 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d {{(pid=61978) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1136.080547] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Triggering sync for uuid 35a6d3ec-8688-43c2-93c4-b23033aaf280 {{(pid=61978) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1136.080689] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Triggering sync for uuid 38e4f039-20bc-4bed-b449-227bde070ed9 {{(pid=61978) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1136.080831] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Triggering sync for uuid a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6 {{(pid=61978) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1136.080974] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Triggering sync for uuid 94665d8c-df88-4ad0-bb90-547ace2d6345 {{(pid=61978) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1136.081680] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "243e7146-46fc-43f4-a83b-cdc58f397f9e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.081922] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "243e7146-46fc-43f4-a83b-cdc58f397f9e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.082228] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock 
"c17c986e-c008-4414-8dd1-4ea836458048" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.082452] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.082642] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.082936] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.083179] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.083424] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "bdfdd685-e440-4f53-b6c4-2ee2f06acba8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.083609] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "bdfdd685-e440-4f53-b6c4-2ee2f06acba8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.083844] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.084039] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.084272] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "4c7053ee-7c44-49ee-8d30-bf14686c6b1c" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.084476] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "9ee04ee8-98ec-4be9-935d-cad7cd176466" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.084729] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "9ee04ee8-98ec-4be9-935d-cad7cd176466" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.084878] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "0cdff646-34ad-49d5-b775-28e8e7ce778e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.085106] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "0cdff646-34ad-49d5-b775-28e8e7ce778e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.085357] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "a1087abd-28d1-40ac-96ab-dc38392d027c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.085558] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "a1087abd-28d1-40ac-96ab-dc38392d027c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.085986] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "17c56c1c-9992-4559-ad23-c68909ae6792" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.086340] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "17c56c1c-9992-4559-ad23-c68909ae6792" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.086516] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.086706] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.086969] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "de0f46af-870a-4095-a417-913a2c51f66b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.087203] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.087408] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "35a6d3ec-8688-43c2-93c4-b23033aaf280" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.087607] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "38e4f039-20bc-4bed-b449-227bde070ed9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.087806] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.088017] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "94665d8c-df88-4ad0-bb90-547ace2d6345" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.088210] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.088338] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1136.089095] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddfd66b8-723d-477d-b696-1ea4e915f69e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.093783] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b2cd73e-a5f6-4183-9db0-348e35573eac {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.096521] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761d2ae2-650b-42a1-828d-795f1d038a11 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.099440] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec04d15-85fe-4119-a30b-006eaa9f5e20 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.102028] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb260071-6c52-4157-bf5d-3c32367d3122 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.105426] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72bc94a-f336-4b6d-aed6-a1ba24ba7e53 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.108513] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95dceb4-0d77-41ca-9cd7-523a23f7af6f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.111828] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd4f0ce8-b11c-4731-b417-0f42579d4a76 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.114851] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-969a21b6-f8cf-4ba1-a2a7-481313ede6df {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.118032] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682e8d1c-9bf7-4f62-bb3b-fc034664d587 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.121494] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.135625] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.205264] env[61978]: DEBUG oslo_vmware.api [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 
tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395443, 'name': PowerOnVM_Task, 'duration_secs': 0.474966} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.205462] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1136.205673] env[61978]: INFO nova.compute.manager [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Took 9.92 seconds to spawn the instance on the hypervisor. [ 1136.206638] env[61978]: DEBUG nova.compute.manager [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1136.206765] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76e5123-958a-4309-a502-acc0af62ded3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.233896] env[61978]: DEBUG oslo_vmware.api [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395438, 'name': PowerOnVM_Task, 'duration_secs': 1.030942} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.233896] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1136.233896] env[61978]: INFO nova.compute.manager [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Took 17.29 seconds to spawn the instance on the hypervisor. 
[ 1136.233896] env[61978]: DEBUG nova.compute.manager [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1136.234930] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776d23cf-b4f5-4e4a-a7df-0ec5d0499832 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.257407] env[61978]: DEBUG nova.network.neutron [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Successfully updated port: e0d58422-d319-4563-81b9-65c067c4b306 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1136.406512] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Task: {'id': task-1395444, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.448999] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395445, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.670141] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.586s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.670549] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "243e7146-46fc-43f4-a83b-cdc58f397f9e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.589s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.670859] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.588s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.671513] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.589s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.681635] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad" "released" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.595s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.683189] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "a1087abd-28d1-40ac-96ab-dc38392d027c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.598s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.688216] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "0cdff646-34ad-49d5-b775-28e8e7ce778e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.603s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.688543] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "bdfdd685-e440-4f53-b6c4-2ee2f06acba8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.605s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.688916] env[61978]: INFO nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] During sync_power_state the instance has a pending task (rescuing). Skip. [ 1136.689521] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "9ee04ee8-98ec-4be9-935d-cad7cd176466" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.604s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.689889] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "17c56c1c-9992-4559-ad23-c68909ae6792" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.604s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.690552] env[61978]: INFO nova.compute.manager [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Rescuing [ 1136.690773] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "refresh_cache-9ee04ee8-98ec-4be9-935d-cad7cd176466" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1136.690922] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquired lock "refresh_cache-9ee04ee8-98ec-4be9-935d-cad7cd176466" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.691099] env[61978]: DEBUG nova.network.neutron [None 
req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1136.710058] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1fb8da2-158c-44e1-8199-6b549abf82bf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.730420] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5cf1349-32d5-4cfa-8b1a-92840a3aa8c0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.736293] env[61978]: INFO nova.compute.manager [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Took 32.82 seconds to build instance. [ 1136.783822] env[61978]: DEBUG oslo_concurrency.lockutils [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "refresh_cache-a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1136.783988] env[61978]: DEBUG oslo_concurrency.lockutils [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquired lock "refresh_cache-a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.784148] env[61978]: DEBUG nova.network.neutron [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1136.791904] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f244aead-d5a1-4e67-9446-4440532f02e6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.794888] env[61978]: INFO nova.compute.manager [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Took 37.28 seconds to build instance. 
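The "Acquiring lock" / "Acquired lock" / "Releasing lock" entries come from oslo.concurrency's lockutils, which serializes work on a named resource such as an instance's network info cache. A minimal sketch of that pattern, reusing the refresh_cache-<uuid> naming visible in the log; the body of the critical section is a placeholder.

    from oslo_concurrency import lockutils

    instance_uuid = "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6"

    # Serialize access to this instance's network info cache.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        pass  # critical section: rebuild the cache while the lock is held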
[ 1136.806521] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41939bfa-c14a-49d9-ab5b-75a4ff4529f2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.823067] env[61978]: DEBUG nova.compute.provider_tree [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1136.905250] env[61978]: DEBUG oslo_vmware.api [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Task: {'id': task-1395444, 'name': PowerOnVM_Task, 'duration_secs': 0.731337} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.905250] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1136.905250] env[61978]: INFO nova.compute.manager [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Took 5.94 seconds to spawn the instance on the hypervisor. [ 1136.905250] env[61978]: DEBUG nova.compute.manager [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1136.906424] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce8bd8f-12de-48dc-a7a4-fa0cdfbb057c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.955019] env[61978]: DEBUG oslo_vmware.api [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395445, 'name': PowerOnVM_Task, 'duration_secs': 0.713503} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.955019] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1136.955019] env[61978]: INFO nova.compute.manager [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Took 13.18 seconds to spawn the instance on the hypervisor. 
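Several entries in this stretch read "During sync_power_state the instance has a pending task (spawning). Skip." The periodic power-state sync deliberately leaves instances alone while another operation is still in flight. An illustrative paraphrase of that decision, not Nova's actual code:

    def should_sync_power_state(instance):
        """Skip instances with an in-flight task, per the 'Skip.' entries above."""
        if instance.task_state is not None:   # e.g. 'spawning', 'rescuing'
            return False
        return True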
[ 1136.955019] env[61978]: DEBUG nova.compute.manager [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1136.955019] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed9342f-dc7e-4aa9-8519-ee1c938fc94e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.210633] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquiring lock "0cdff646-34ad-49d5-b775-28e8e7ce778e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1137.211250] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lock "0cdff646-34ad-49d5-b775-28e8e7ce778e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.215749] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquiring lock "0cdff646-34ad-49d5-b775-28e8e7ce778e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1137.215749] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lock "0cdff646-34ad-49d5-b775-28e8e7ce778e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.215749] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lock "0cdff646-34ad-49d5-b775-28e8e7ce778e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.217028] env[61978]: INFO nova.compute.manager [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Terminating instance [ 1137.218741] env[61978]: DEBUG nova.compute.manager [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1137.223018] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1137.223018] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-236c1bcb-1578-4b52-ac31-cfa24c8daa36 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.231460] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1137.231754] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-36322e37-64ec-4055-8158-fa8395515eba {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.241080] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bb89dfa0-55f9-489d-b6ff-230a683fe0c7 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "35a6d3ec-8688-43c2-93c4-b23033aaf280" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.336s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.244197] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "35a6d3ec-8688-43c2-93c4-b23033aaf280" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 1.157s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.244467] env[61978]: INFO nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] During sync_power_state the instance has a pending task (spawning). Skip. [ 1137.244582] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "35a6d3ec-8688-43c2-93c4-b23033aaf280" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.250237] env[61978]: DEBUG oslo_vmware.api [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1137.250237] env[61978]: value = "task-1395446" [ 1137.250237] env[61978]: _type = "Task" [ 1137.250237] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.260321] env[61978]: DEBUG oslo_vmware.api [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395446, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.300582] env[61978]: DEBUG oslo_concurrency.lockutils [None req-98cab92f-5d83-4ce7-99b8-381dd628b05a tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "de0f46af-870a-4095-a417-913a2c51f66b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.809s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.300802] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "de0f46af-870a-4095-a417-913a2c51f66b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 1.214s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.300928] env[61978]: INFO nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: de0f46af-870a-4095-a417-913a2c51f66b] During sync_power_state the instance has a pending task (spawning). Skip. [ 1137.301140] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "de0f46af-870a-4095-a417-913a2c51f66b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.301446] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "de0f46af-870a-4095-a417-913a2c51f66b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1137.301675] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "de0f46af-870a-4095-a417-913a2c51f66b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.301887] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "de0f46af-870a-4095-a417-913a2c51f66b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1137.302096] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "de0f46af-870a-4095-a417-913a2c51f66b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.302266] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 
tempest-ImagesTestJSON-1872689461-project-member] Lock "de0f46af-870a-4095-a417-913a2c51f66b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.309867] env[61978]: INFO nova.compute.manager [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Terminating instance [ 1137.313531] env[61978]: DEBUG nova.compute.manager [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1137.313627] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1137.315354] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5d80e6c-00ef-40b3-b986-cc3fdc449173 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.323820] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1137.324157] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6bf52438-185f-44fa-8ec8-5ed5470c4b05 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.329326] env[61978]: DEBUG nova.scheduler.client.report [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1137.334461] env[61978]: DEBUG oslo_vmware.api [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1137.334461] env[61978]: value = "task-1395447" [ 1137.334461] env[61978]: _type = "Task" [ 1137.334461] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.346529] env[61978]: DEBUG oslo_vmware.api [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395447, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.362100] env[61978]: DEBUG nova.network.neutron [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1137.386405] env[61978]: DEBUG nova.compute.manager [None req-aa9e57fa-3653-4435-88b0-6a9b2a14e4a3 tempest-ServerDiagnosticsV248Test-290529957 tempest-ServerDiagnosticsV248Test-290529957-project-admin] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1137.391856] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db4f27f-1d41-45fa-9ee3-3ce18f3d6a13 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.403374] env[61978]: INFO nova.compute.manager [None req-aa9e57fa-3653-4435-88b0-6a9b2a14e4a3 tempest-ServerDiagnosticsV248Test-290529957 tempest-ServerDiagnosticsV248Test-290529957-project-admin] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Retrieving diagnostics [ 1137.404390] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed51410-4a31-4580-b031-3f79b50c0bff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.456253] env[61978]: INFO nova.compute.manager [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Took 21.58 seconds to build instance. [ 1137.472810] env[61978]: INFO nova.compute.manager [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Took 33.72 seconds to build instance. 
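The entries that follow update instance_info_cache with a network_info list whose structure (VIF, network, subnets, fixed and floating IPs) is printed in full below. A short sketch of walking that structure, treated here as plain dicts, to collect addresses; nothing beyond the keys visible in the log is assumed.

    def collect_addresses(network_info):
        """Gather fixed and floating addresses from a network_info list."""
        fixed, floating = [], []
        for vif in network_info:
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    fixed.append(ip["address"])
                    for fip in ip.get("floating_ips", []):
                        floating.append(fip["address"])
        return fixed, floating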
[ 1137.592154] env[61978]: DEBUG nova.network.neutron [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Updating instance_info_cache with network_info: [{"id": "fdf95a42-1379-4895-9a94-f8a8cf1d070d", "address": "fa:16:3e:0f:c0:2a", "network": {"id": "3e60c5f0-d590-4a22-a8bf-c0356ac4deb4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1865077723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86f4ae0b29af4ee2b33e5a499cf1e899", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdf95a42-13", "ovs_interfaceid": "fdf95a42-1379-4895-9a94-f8a8cf1d070d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.623122] env[61978]: DEBUG nova.network.neutron [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Updating instance_info_cache with network_info: [{"id": "e0d58422-d319-4563-81b9-65c067c4b306", "address": "fa:16:3e:5c:7f:3b", "network": {"id": "4a12b89f-0910-460a-8694-c424a051b6c6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-917593776-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df79d3305e464a6b83f18497a2464140", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d58422-d3", "ovs_interfaceid": "e0d58422-d319-4563-81b9-65c067c4b306", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.746826] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "35a6d3ec-8688-43c2-93c4-b23033aaf280" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1137.746826] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "35a6d3ec-8688-43c2-93c4-b23033aaf280" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.746826] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "35a6d3ec-8688-43c2-93c4-b23033aaf280-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1137.746826] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "35a6d3ec-8688-43c2-93c4-b23033aaf280-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.746826] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "35a6d3ec-8688-43c2-93c4-b23033aaf280-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.749945] env[61978]: INFO nova.compute.manager [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Terminating instance [ 1137.755174] env[61978]: DEBUG nova.compute.manager [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1137.755505] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1137.756383] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b33584-b909-4b0b-b083-f425efd5508e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.765837] env[61978]: DEBUG oslo_vmware.api [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395446, 'name': PowerOffVM_Task, 'duration_secs': 0.434202} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.768067] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1137.768414] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1137.768900] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1137.770132] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bbbd4de6-2b3b-4fe3-8f54-024a2e8c81e1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.774054] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6768390a-130d-41a2-b8b7-51e1c51bc57f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.780413] env[61978]: DEBUG oslo_vmware.api [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1137.780413] env[61978]: value = "task-1395448" [ 1137.780413] env[61978]: _type = "Task" [ 1137.780413] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.790590] env[61978]: DEBUG oslo_vmware.api [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395448, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.836781] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.477s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.837448] env[61978]: DEBUG nova.compute.manager [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1137.840316] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.028s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.840418] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.842675] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.860s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.846347] env[61978]: INFO nova.compute.claims [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1137.858029] env[61978]: DEBUG oslo_vmware.api [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395447, 'name': PowerOffVM_Task, 'duration_secs': 0.352026} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.858029] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1137.858159] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1137.858386] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b1e1298-1daa-4040-a69d-acb2f32d0765 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.878010] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1137.879699] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1137.879699] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Deleting the datastore file [datastore1] 0cdff646-34ad-49d5-b775-28e8e7ce778e {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1137.879699] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf558e91-83e3-4d5c-b64d-50e701f9c77f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.888676] env[61978]: DEBUG oslo_vmware.api [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for the task: (returnval){ [ 1137.888676] env[61978]: value = "task-1395451" [ 1137.888676] env[61978]: _type = "Task" [ 1137.888676] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.896055] env[61978]: INFO nova.scheduler.client.report [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Deleted allocations for instance c17c986e-c008-4414-8dd1-4ea836458048 [ 1137.905356] env[61978]: DEBUG oslo_vmware.api [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395451, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.958889] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e88a6b50-2ee1-4dfe-9dfd-38c830cd9522 tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Lock "38e4f039-20bc-4bed-b449-227bde070ed9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.100s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.958889] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "38e4f039-20bc-4bed-b449-227bde070ed9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 1.871s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.959258] env[61978]: INFO nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] During sync_power_state the instance has a pending task (spawning). Skip. [ 1137.959603] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "38e4f039-20bc-4bed-b449-227bde070ed9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.977110] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b8f8629c-e5f2-463c-b64f-671d0e1b8905 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.234s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.977110] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 1.890s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.977425] env[61978]: INFO nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] During sync_power_state the instance has a pending task (spawning). Skip. 
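The teardown traces around this point follow a fixed order: power off the VM, unregister it, then delete its files from the datastore, each step visible as PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task invocations. An illustrative outline of that sequence only; invoke and wait are hypothetical stand-ins for issuing a vCenter method and waiting on its task, not real API names.

    def destroy_vm(invoke, wait, vm_ref, datastore_dir):
        """Power off, unregister, then delete the instance's datastore files."""
        wait(invoke("PowerOffVM_Task", vm_ref))            # returns a task to poll
        invoke("UnregisterVM", vm_ref)                     # synchronous, no task
        wait(invoke("DeleteDatastoreFile_Task", datastore_dir))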
[ 1137.977486] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.023918] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1138.024334] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1138.024587] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Deleting the datastore file [datastore2] de0f46af-870a-4095-a417-913a2c51f66b {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1138.024926] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c8bf2d8-f2e9-4792-a9b0-fc6b734f31da {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.033833] env[61978]: DEBUG oslo_vmware.api [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1138.033833] env[61978]: value = "task-1395452" [ 1138.033833] env[61978]: _type = "Task" [ 1138.033833] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.044870] env[61978]: DEBUG oslo_vmware.api [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395452, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.094824] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Releasing lock "refresh_cache-9ee04ee8-98ec-4be9-935d-cad7cd176466" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1138.125603] env[61978]: DEBUG oslo_concurrency.lockutils [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Releasing lock "refresh_cache-a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1138.126259] env[61978]: DEBUG nova.compute.manager [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Instance network_info: |[{"id": "e0d58422-d319-4563-81b9-65c067c4b306", "address": "fa:16:3e:5c:7f:3b", "network": {"id": "4a12b89f-0910-460a-8694-c424a051b6c6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-917593776-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df79d3305e464a6b83f18497a2464140", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d58422-d3", "ovs_interfaceid": "e0d58422-d319-4563-81b9-65c067c4b306", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1138.126542] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:7f:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e0d58422-d319-4563-81b9-65c067c4b306', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1138.135612] env[61978]: DEBUG oslo.service.loopingcall [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1138.136785] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1138.137019] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-03ac1a76-484d-4ffe-a1d7-6acea88bae17 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.161986] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1138.161986] env[61978]: value = "task-1395453" [ 1138.161986] env[61978]: _type = "Task" [ 1138.161986] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.173687] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395453, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.242933] env[61978]: DEBUG nova.compute.manager [req-0aea27f3-eb27-4971-a0fd-058fa88acab9 req-bdea93c4-a1cd-4e26-8abb-dbaa07d5262d service nova] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Received event network-changed-e0d58422-d319-4563-81b9-65c067c4b306 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1138.243208] env[61978]: DEBUG nova.compute.manager [req-0aea27f3-eb27-4971-a0fd-058fa88acab9 req-bdea93c4-a1cd-4e26-8abb-dbaa07d5262d service nova] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Refreshing instance network info cache due to event network-changed-e0d58422-d319-4563-81b9-65c067c4b306. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1138.243428] env[61978]: DEBUG oslo_concurrency.lockutils [req-0aea27f3-eb27-4971-a0fd-058fa88acab9 req-bdea93c4-a1cd-4e26-8abb-dbaa07d5262d service nova] Acquiring lock "refresh_cache-a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1138.243585] env[61978]: DEBUG oslo_concurrency.lockutils [req-0aea27f3-eb27-4971-a0fd-058fa88acab9 req-bdea93c4-a1cd-4e26-8abb-dbaa07d5262d service nova] Acquired lock "refresh_cache-a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1138.243753] env[61978]: DEBUG nova.network.neutron [req-0aea27f3-eb27-4971-a0fd-058fa88acab9 req-bdea93c4-a1cd-4e26-8abb-dbaa07d5262d service nova] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Refreshing network info cache for port e0d58422-d319-4563-81b9-65c067c4b306 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1138.291207] env[61978]: DEBUG oslo_vmware.api [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395448, 'name': PowerOffVM_Task, 'duration_secs': 0.278146} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.291538] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1138.291782] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1138.292021] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-826b9dae-9549-46d7-825a-c55557ef2d54 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.355246] env[61978]: DEBUG nova.compute.utils [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1138.356798] env[61978]: DEBUG nova.compute.manager [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1138.357011] env[61978]: DEBUG nova.network.neutron [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1138.363150] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1138.363480] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1138.363691] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Deleting the datastore file [datastore2] 35a6d3ec-8688-43c2-93c4-b23033aaf280 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1138.364016] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dacbaf20-424c-4333-8f67-c38efa4274a7 {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.373740] env[61978]: DEBUG oslo_vmware.api [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1138.373740] env[61978]: value = "task-1395455" [ 1138.373740] env[61978]: _type = "Task" [ 1138.373740] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.386350] env[61978]: DEBUG oslo_vmware.api [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395455, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.399059] env[61978]: DEBUG oslo_vmware.api [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Task: {'id': task-1395451, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.237212} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.404434] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1138.404434] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1138.404434] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1138.404434] env[61978]: INFO nova.compute.manager [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1138.404434] env[61978]: DEBUG oslo.service.loopingcall [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1138.404434] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a00e5a84-314b-4353-8b0c-ea0a2b622b12 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "c17c986e-c008-4414-8dd1-4ea836458048" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.591s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.404434] env[61978]: DEBUG nova.compute.manager [-] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1138.404434] env[61978]: DEBUG nova.network.neutron [-] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1138.405699] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "c17c986e-c008-4414-8dd1-4ea836458048" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 2.323s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1138.406296] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5a82ec5-bd7b-4e1b-b658-12b9a6d6e653 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.419354] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29aada4-e624-47db-9821-74d97839497c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.439258] env[61978]: DEBUG nova.policy [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9580f2ba2f244d8c9950bbe509c7c9ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d4d29d9b6a74b4887684c7b310280b7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1138.478332] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5284fa50-9179-4123-a1aa-3ca1873bbc07 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.489465] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3232559a-43f5-42df-b164-204254092e0d tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Suspending the VM {{(pid=61978) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1138.489758] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-08d8b9ca-2014-4c9e-acfa-d36a74efeb6e {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.497811] env[61978]: DEBUG oslo_vmware.api [None req-3232559a-43f5-42df-b164-204254092e0d tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1138.497811] env[61978]: value = "task-1395456" [ 1138.497811] env[61978]: _type = "Task" [ 1138.497811] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.509868] env[61978]: DEBUG oslo_vmware.api [None req-3232559a-43f5-42df-b164-204254092e0d tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395456, 'name': SuspendVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.546916] env[61978]: DEBUG oslo_vmware.api [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395452, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153674} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.546916] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1138.546916] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1138.546916] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1138.546916] env[61978]: INFO nova.compute.manager [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1138.547263] env[61978]: DEBUG oslo.service.loopingcall [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1138.547309] env[61978]: DEBUG nova.compute.manager [-] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1138.547401] env[61978]: DEBUG nova.network.neutron [-] [instance: de0f46af-870a-4095-a417-913a2c51f66b] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1138.626631] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1138.626967] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b729758-f1cd-454f-81f7-2147a8bf5fea {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.635844] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1138.635844] env[61978]: value = "task-1395457" [ 1138.635844] env[61978]: _type = "Task" [ 1138.635844] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.645927] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395457, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.674211] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395453, 'name': CreateVM_Task, 'duration_secs': 0.431693} completed successfully. 
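The repeated "Waiting for function ... _deallocate_network_with_retries to return" entries come from a looping call that keeps retrying network deallocation until it succeeds. A rough sketch of that shape using oslo.service's FixedIntervalLoopingCall, assuming a deallocate() callable that may fail transiently; the interval and attempt count are illustrative, not the values Nova uses:

from oslo_service import loopingcall

def deallocate_network_with_retries(deallocate, max_attempts=3, interval=1):
    state = {'attempts': 0}

    def _try_once():
        state['attempts'] += 1
        try:
            deallocate()
        except Exception:
            if state['attempts'] >= max_attempts:
                raise  # give up; wait() below re-raises this
            return     # let the looping call run again after `interval`
        # Success: stop the looping call so wait() returns.
        raise loopingcall.LoopingCallDone()

    timer = loopingcall.FixedIntervalLoopingCall(_try_once)
    timer.start(interval=interval).wait()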
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.675041] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1138.675312] env[61978]: DEBUG oslo_concurrency.lockutils [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1138.675414] env[61978]: DEBUG oslo_concurrency.lockutils [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1138.675799] env[61978]: DEBUG oslo_concurrency.lockutils [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1138.676320] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-371a3eb2-527a-4824-878f-fdb127e234ba {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.682410] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1138.682410] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52549c35-3c65-0d44-a6e4-5fd13d6dbe8e" [ 1138.682410] env[61978]: _type = "Task" [ 1138.682410] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.693527] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52549c35-3c65-0d44-a6e4-5fd13d6dbe8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.863897] env[61978]: DEBUG nova.compute.manager [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1138.888025] env[61978]: DEBUG oslo_vmware.api [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395455, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.265225} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.888326] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1138.888520] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1138.888703] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1138.888906] env[61978]: INFO nova.compute.manager [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1138.889179] env[61978]: DEBUG oslo.service.loopingcall [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1138.889406] env[61978]: DEBUG nova.compute.manager [-] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1138.889961] env[61978]: DEBUG nova.network.neutron [-] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1138.959540] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "c17c986e-c008-4414-8dd1-4ea836458048" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.552s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1139.018373] env[61978]: DEBUG oslo_vmware.api [None req-3232559a-43f5-42df-b164-204254092e0d tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395456, 'name': SuspendVM_Task} progress is 62%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.155066] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395457, 'name': PowerOffVM_Task, 'duration_secs': 0.459654} completed successfully. 
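The oslo_concurrency.lockutils lines throughout this stretch record, per named lock, how long the caller waited to acquire it and how long it was held (e.g. "waited 2.323s", "held 0.552s"). A small sketch of the same primitives, assuming an in-process lock is sufficient; the lock names mirror the ones visible in the log (instance UUIDs, "compute_resources"):

from oslo_concurrency import lockutils

def terminate_instance(instance_uuid, do_terminate):
    # Context-manager form: serialize all operations on one instance.
    with lockutils.lock(instance_uuid):
        do_terminate(instance_uuid)

# Decorator form: every call competes for the same named lock; lockutils
# logs the acquire-wait and hold durations seen in the entries above.
@lockutils.synchronized('compute_resources')
def claim_resources(tracker, instance):
    return tracker.instance_claim(instance)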
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.158921] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1139.159793] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40a96ab6-257f-4575-8d32-35d9bc50de25 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.202618] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c50078-baf8-443a-ada2-faea3fba9da1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.215502] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "81f0b79c-97b3-4a5d-a8fc-7c2250571177" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1139.215793] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "81f0b79c-97b3-4a5d-a8fc-7c2250571177" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1139.227651] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52549c35-3c65-0d44-a6e4-5fd13d6dbe8e, 'name': SearchDatastore_Task, 'duration_secs': 0.01616} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.230781] env[61978]: DEBUG nova.network.neutron [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Successfully created port: 25e2db6a-d281-4865-8d9b-8ae12370c2b9 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1139.234259] env[61978]: DEBUG oslo_concurrency.lockutils [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1139.234259] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1139.234259] env[61978]: DEBUG oslo_concurrency.lockutils [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1139.234500] env[61978]: DEBUG oslo_concurrency.lockutils [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.234723] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1139.235641] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8d68c81-084a-4469-a8bd-9ab21ca4fe3e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.253918] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1139.254684] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1139.255756] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4e60d09-d9f9-4b5c-a00e-8c406fd7a5d6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.264229] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1139.264229] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b36fae-5f6c-69c5-eac8-a7d6d3b99143" [ 1139.264229] env[61978]: _type = "Task" [ 1139.264229] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.270801] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1139.271273] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2028cc02-5644-4992-9ee2-3ad10dce145f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.282314] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b36fae-5f6c-69c5-eac8-a7d6d3b99143, 'name': SearchDatastore_Task, 'duration_secs': 0.01169} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.284768] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1139.284768] env[61978]: value = "task-1395458" [ 1139.284768] env[61978]: _type = "Task" [ 1139.284768] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.285231] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36fed18a-8fc0-4cd4-ae64-20d93a4e7dd8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.303462] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1139.303462] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5205da64-9a81-0147-cbb7-2e59b02f215b" [ 1139.303462] env[61978]: _type = "Task" [ 1139.303462] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.310535] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] VM already powered off {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1139.310800] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1139.311068] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1139.317294] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5205da64-9a81-0147-cbb7-2e59b02f215b, 'name': SearchDatastore_Task, 'duration_secs': 0.011618} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.318565] env[61978]: DEBUG oslo_concurrency.lockutils [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1139.318845] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6/a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1139.319647] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e958e7-df54-4dce-8fed-ece9b39e5e33 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.322324] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.322546] env[61978]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1139.322820] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8289701-a896-46e3-8e96-e96c6541088c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.325186] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7263ab63-e051-452b-9846-94adb2d0990a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.332519] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e172e459-b63e-4270-8646-d2c283bc2f57 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.338044] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1139.338095] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1139.339783] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1139.339783] env[61978]: value = "task-1395459" [ 1139.339783] env[61978]: _type = "Task" [ 1139.339783] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.339783] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86741850-8695-46fb-814c-e7fa2f469388 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.343009] env[61978]: DEBUG nova.network.neutron [req-0aea27f3-eb27-4971-a0fd-058fa88acab9 req-bdea93c4-a1cd-4e26-8abb-dbaa07d5262d service nova] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Updated VIF entry in instance network info cache for port e0d58422-d319-4563-81b9-65c067c4b306. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1139.343103] env[61978]: DEBUG nova.network.neutron [req-0aea27f3-eb27-4971-a0fd-058fa88acab9 req-bdea93c4-a1cd-4e26-8abb-dbaa07d5262d service nova] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Updating instance_info_cache with network_info: [{"id": "e0d58422-d319-4563-81b9-65c067c4b306", "address": "fa:16:3e:5c:7f:3b", "network": {"id": "4a12b89f-0910-460a-8694-c424a051b6c6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-917593776-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df79d3305e464a6b83f18497a2464140", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d58422-d3", "ovs_interfaceid": "e0d58422-d319-4563-81b9-65c067c4b306", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.380889] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b66878e-ca9a-4439-a02f-60081daead0a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.383878] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1139.383878] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52eb5ffd-f595-8f39-09a6-2e6f5b3e514b" [ 1139.383878] env[61978]: _type = "Task" [ 1139.383878] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.387400] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395459, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.394929] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-981af535-8dc9-4266-9784-6bab7635def2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.403039] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52eb5ffd-f595-8f39-09a6-2e6f5b3e514b, 'name': SearchDatastore_Task, 'duration_secs': 0.010178} completed successfully. 
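The network-changed event near the top of this stretch and the "Updated VIF entry in instance network info cache" entry just above show the same handler pattern: take the per-instance refresh_cache lock, re-query Neutron for the port, and rewrite the cached network_info. A simplified sketch; fetch_port_info() and save_network_info() are hypothetical stand-ins for the Neutron client call and the info-cache persistence, not Nova's real helpers:

from oslo_concurrency import lockutils

def handle_network_changed(instance_uuid, port_id, cache,
                           fetch_port_info, save_network_info):
    # Matches the "Acquiring/Acquired/Releasing lock refresh_cache-<uuid>" lines.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        vif = fetch_port_info(port_id)  # fresh view of the port from Neutron
        network_info = [v for v in cache.get(instance_uuid, [])
                        if v['id'] != port_id]
        network_info.append(vif)        # "Updated VIF entry ... for port <id>"
        cache[instance_uuid] = network_info
        save_network_info(instance_uuid, network_info)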
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.405755] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41077b57-e4d2-4f64-8151-3aa3f706f541 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.415765] env[61978]: DEBUG nova.compute.provider_tree [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1139.422567] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1139.422567] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52bf6180-7a28-6861-fcab-869b369b3de3" [ 1139.422567] env[61978]: _type = "Task" [ 1139.422567] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.432885] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52bf6180-7a28-6861-fcab-869b369b3de3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.513020] env[61978]: DEBUG oslo_vmware.api [None req-3232559a-43f5-42df-b164-204254092e0d tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395456, 'name': SuspendVM_Task, 'duration_secs': 0.709842} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.513579] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3232559a-43f5-42df-b164-204254092e0d tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Suspended the VM {{(pid=61978) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1139.513830] env[61978]: DEBUG nova.compute.manager [None req-3232559a-43f5-42df-b164-204254092e0d tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1139.515594] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae662922-4c51-44c6-84e4-ef043a7cee40 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.594018] env[61978]: DEBUG nova.network.neutron [-] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.719474] env[61978]: DEBUG nova.compute.manager [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1139.797560] env[61978]: DEBUG nova.network.neutron [-] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.852593] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395459, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.873933] env[61978]: DEBUG oslo_concurrency.lockutils [req-0aea27f3-eb27-4971-a0fd-058fa88acab9 req-bdea93c4-a1cd-4e26-8abb-dbaa07d5262d service nova] Releasing lock "refresh_cache-a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1139.879319] env[61978]: DEBUG nova.compute.manager [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1139.886012] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "d3c82821-0617-4de6-8109-813a67910ed1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1139.886287] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "d3c82821-0617-4de6-8109-813a67910ed1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1139.905779] env[61978]: DEBUG nova.virt.hardware [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1139.906077] env[61978]: DEBUG nova.virt.hardware [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1139.906249] env[61978]: DEBUG nova.virt.hardware [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1139.906465] env[61978]: DEBUG nova.virt.hardware [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1139.906726] env[61978]: DEBUG nova.virt.hardware [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1139.906920] env[61978]: DEBUG nova.virt.hardware [None req-b16cbd3b-782d-4219-8342-f88054c2595c 
tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1139.907154] env[61978]: DEBUG nova.virt.hardware [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1139.907323] env[61978]: DEBUG nova.virt.hardware [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1139.907491] env[61978]: DEBUG nova.virt.hardware [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1139.907658] env[61978]: DEBUG nova.virt.hardware [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1139.907833] env[61978]: DEBUG nova.virt.hardware [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1139.908921] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f96ecc9-057b-4dc4-a844-0b5d576f9ead {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.917432] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e81d9f07-a9e0-4f1c-b520-8b20e52e69e3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.922214] env[61978]: DEBUG nova.scheduler.client.report [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1139.944706] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 
tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52bf6180-7a28-6861-fcab-869b369b3de3, 'name': SearchDatastore_Task, 'duration_secs': 0.010342} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.944964] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1139.945242] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 9ee04ee8-98ec-4be9-935d-cad7cd176466/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk. {{(pid=61978) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1139.945505] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a7c19ac1-febe-4969-b41f-134be8be1ea5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.953046] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1139.953046] env[61978]: value = "task-1395460" [ 1139.953046] env[61978]: _type = "Task" [ 1139.953046] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.961614] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395460, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.014579] env[61978]: DEBUG nova.network.neutron [-] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.096700] env[61978]: INFO nova.compute.manager [-] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Took 1.69 seconds to deallocate network for instance. 
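The nova.virt.hardware lines further up ("Build topologies for 1 vcpu(s) 1:1:1" ... "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") enumerate every sockets/cores/threads split of the flavor's vCPU count that fits the limits, then sort by preference. A compact sketch of that enumeration, under the simplifying assumption that a topology is valid when sockets * cores * threads equals the vCPU count and each factor respects its maximum; this is not Nova's exact algorithm:

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every (sockets, cores, threads) factorization of vcpus."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                yield VirtCPUTopology(sockets=sockets, cores=cores, threads=threads)

# For the m1.nano flavor above (1 vCPU) this yields exactly one topology,
# matching the "Got 1 possible topologies" line:
print(list(possible_topologies(1)))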
[ 1140.133098] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "interface-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.133397] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "interface-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.133823] env[61978]: DEBUG nova.objects.instance [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lazy-loading 'flavor' on Instance uuid 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1140.236317] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f338918c-0a26-4bf8-b3f5-47e775f92231 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.236675] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f338918c-0a26-4bf8-b3f5-47e775f92231 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.237068] env[61978]: INFO nova.compute.manager [None req-f338918c-0a26-4bf8-b3f5-47e775f92231 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Rebooting instance [ 1140.242152] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.273283] env[61978]: DEBUG nova.compute.manager [req-efe0d6a5-75dd-4e3f-a78a-3be7ad9130cd req-62f13a49-6a8f-4e38-91bb-d5831dd025d2 service nova] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Received event network-vif-deleted-3b5e4ed3-f9fb-4eed-b851-213b746751b7 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1140.273518] env[61978]: DEBUG nova.compute.manager [req-efe0d6a5-75dd-4e3f-a78a-3be7ad9130cd req-62f13a49-6a8f-4e38-91bb-d5831dd025d2 service nova] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Received event network-vif-deleted-ddce6e3c-0596-4fb1-81fc-7ad5823e1f15 {{(pid=61978) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11131}} [ 1140.273703] env[61978]: DEBUG nova.compute.manager [req-efe0d6a5-75dd-4e3f-a78a-3be7ad9130cd req-62f13a49-6a8f-4e38-91bb-d5831dd025d2 service nova] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Received event network-vif-deleted-86b0db93-0668-4d3e-9bc5-6220369d7160 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1140.299935] env[61978]: INFO nova.compute.manager [-] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Took 1.75 seconds to deallocate network for instance. [ 1140.356578] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395459, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523882} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.356907] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6/a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1140.357184] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1140.357466] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8e4a61fe-2105-45ee-b95d-7f6c8a1937d6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.369092] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1140.369092] env[61978]: value = "task-1395461" [ 1140.369092] env[61978]: _type = "Task" [ 1140.369092] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.379163] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395461, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.389146] env[61978]: DEBUG nova.compute.manager [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1140.428032] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.585s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1140.428643] env[61978]: DEBUG nova.compute.manager [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1140.431996] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.168s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.433797] env[61978]: INFO nova.compute.claims [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1140.464079] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395460, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.441609} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.464418] env[61978]: INFO nova.virt.vmwareapi.ds_util [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 9ee04ee8-98ec-4be9-935d-cad7cd176466/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk. 
[ 1140.465268] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbfb5225-9d94-48b4-aa37-b55f9f64c5b8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.494786] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] 9ee04ee8-98ec-4be9-935d-cad7cd176466/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1140.495434] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6bb832c0-875c-495d-940a-46482bec41ae {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.515239] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1140.515239] env[61978]: value = "task-1395462" [ 1140.515239] env[61978]: _type = "Task" [ 1140.515239] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.518925] env[61978]: INFO nova.compute.manager [-] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Took 1.63 seconds to deallocate network for instance. [ 1140.526638] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395462, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.603372] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.737987] env[61978]: DEBUG nova.objects.instance [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lazy-loading 'pci_requests' on Instance uuid 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1140.753783] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f338918c-0a26-4bf8-b3f5-47e775f92231 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1140.753961] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f338918c-0a26-4bf8-b3f5-47e775f92231 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.754148] env[61978]: DEBUG nova.network.neutron [None req-f338918c-0a26-4bf8-b3f5-47e775f92231 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1140.806912] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.879371] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395461, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071559} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.879637] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1140.880427] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc2afcd-5c7e-434e-8078-9144bd22eea7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.904893] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6/a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1140.907203] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c503e861-c02f-43ba-b0d9-a45059c3bce1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.928817] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1140.928817] env[61978]: value = "task-1395463" [ 1140.928817] env[61978]: _type = "Task" [ 1140.928817] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.933047] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.942501] env[61978]: DEBUG nova.compute.utils [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1140.945614] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395463, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.946828] env[61978]: DEBUG nova.network.neutron [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Successfully updated port: 25e2db6a-d281-4865-8d9b-8ae12370c2b9 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1140.949554] env[61978]: DEBUG nova.compute.manager [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1140.949554] env[61978]: DEBUG nova.network.neutron [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1140.991558] env[61978]: DEBUG nova.policy [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c7f4a6e7c3024aa79fa74eeae1a4d5f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a1c323dddcd42809d565f46ecf5e18f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1141.029140] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.029784] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395462, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.240308] env[61978]: DEBUG nova.objects.base [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Object Instance<9bee3e66-93b5-4c0f-bb46-8fbd78c312c0> lazy-loaded attributes: flavor,pci_requests {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1141.240705] env[61978]: DEBUG nova.network.neutron [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1141.290908] env[61978]: DEBUG nova.network.neutron [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Successfully created port: 63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1141.299212] env[61978]: DEBUG nova.policy [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '346f982562de48fab1702aca567113ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3a4f29a959447159b2f7d194ea94873', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1141.440092] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395463, 'name': ReconfigVM_Task, 'duration_secs': 0.29788} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.440397] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Reconfigured VM instance instance-0000004b to attach disk [datastore2] a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6/a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1141.441044] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8059b3a4-f2e8-4bae-8daa-071de680c877 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.447126] env[61978]: DEBUG nova.compute.manager [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1141.453400] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1141.453400] env[61978]: value = "task-1395464" [ 1141.453400] env[61978]: _type = "Task" [ 1141.453400] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.457750] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "refresh_cache-94665d8c-df88-4ad0-bb90-547ace2d6345" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1141.457750] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquired lock "refresh_cache-94665d8c-df88-4ad0-bb90-547ace2d6345" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.459108] env[61978]: DEBUG nova.network.neutron [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1141.480795] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395464, 'name': Rename_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.527775] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395462, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.599269] env[61978]: DEBUG nova.network.neutron [None req-f338918c-0a26-4bf8-b3f5-47e775f92231 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Updating instance_info_cache with network_info: [{"id": "7417d7e9-723d-408d-bfa4-e583af757e79", "address": "fa:16:3e:e6:e3:c6", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7417d7e9-72", "ovs_interfaceid": "7417d7e9-723d-408d-bfa4-e583af757e79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.631794] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.632073] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.632320] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.632531] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.632714] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.635445] env[61978]: INFO nova.compute.manager [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Terminating instance [ 1141.640216] env[61978]: DEBUG nova.compute.manager [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1141.640652] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1141.641503] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7efdd51-381d-45a5-bae3-7ec50b7f0a20 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.650043] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1141.652637] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8477e399-91da-4d7e-9694-0e7b8a8316de {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.722598] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1141.725298] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1141.725298] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Deleting the datastore file [datastore2] 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1141.725298] 
env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f169ec47-247b-49d2-992e-6d9a145a35b6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.730926] env[61978]: DEBUG oslo_vmware.api [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1141.730926] env[61978]: value = "task-1395466" [ 1141.730926] env[61978]: _type = "Task" [ 1141.730926] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.742610] env[61978]: DEBUG oslo_vmware.api [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395466, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.788257] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ba3cca-3a1a-4a24-9193-349101a7defe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.795639] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc1d37dc-e53e-4239-8bab-28ef723a7f4a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.800709] env[61978]: DEBUG nova.network.neutron [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Successfully created port: 3ddcad7d-4ce0-48f2-adea-99d0e1cd2a04 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1141.833366] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7fb65c-d9b1-4142-b860-d19ef2e5cdcd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.844071] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0cf1931-973f-4f6c-ac6c-70934633ccad {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.858968] env[61978]: DEBUG nova.compute.provider_tree [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1141.980990] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395464, 'name': Rename_Task, 'duration_secs': 0.144751} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.981487] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1141.981906] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-274dcc6c-ec57-48dc-a26e-f56bca0abbf5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.990203] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1141.990203] env[61978]: value = "task-1395467" [ 1141.990203] env[61978]: _type = "Task" [ 1141.990203] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.000279] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395467, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.018613] env[61978]: DEBUG nova.network.neutron [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1142.036701] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395462, 'name': ReconfigVM_Task, 'duration_secs': 1.121029} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.037144] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Reconfigured VM instance instance-0000003a to attach disk [datastore2] 9ee04ee8-98ec-4be9-935d-cad7cd176466/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1142.038998] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3367a42e-e15f-4004-b513-7d756b67d4ac {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.072669] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fbd072bb-9df0-411b-9163-b026df6313e9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.090643] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1142.090643] env[61978]: value = "task-1395468" [ 1142.090643] env[61978]: _type = "Task" [ 1142.090643] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.103299] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f338918c-0a26-4bf8-b3f5-47e775f92231 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1142.105035] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395468, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.105477] env[61978]: DEBUG nova.compute.manager [None req-f338918c-0a26-4bf8-b3f5-47e775f92231 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1142.106309] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc1cc43-92b3-4710-ac96-02bb6a6320e9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.211149] env[61978]: DEBUG nova.network.neutron [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Updating instance_info_cache with network_info: [{"id": "25e2db6a-d281-4865-8d9b-8ae12370c2b9", "address": "fa:16:3e:64:7b:8d", "network": {"id": "61175c4b-0b5d-4186-97ac-7a615b95ef28", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-341600036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d4d29d9b6a74b4887684c7b310280b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25e2db6a-d2", "ovs_interfaceid": "25e2db6a-d281-4865-8d9b-8ae12370c2b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.243511] env[61978]: DEBUG oslo_vmware.api [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395466, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18472} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.243847] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1142.244089] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1142.244307] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1142.244554] env[61978]: INFO nova.compute.manager [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1142.244893] env[61978]: DEBUG oslo.service.loopingcall [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1142.245180] env[61978]: DEBUG nova.compute.manager [-] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1142.245307] env[61978]: DEBUG nova.network.neutron [-] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1142.364557] env[61978]: DEBUG nova.scheduler.client.report [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1142.466541] env[61978]: DEBUG nova.compute.manager [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1142.502150] env[61978]: DEBUG nova.virt.hardware [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1142.502544] env[61978]: DEBUG nova.virt.hardware [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1142.502772] env[61978]: DEBUG nova.virt.hardware [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1142.502980] env[61978]: DEBUG nova.virt.hardware [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1142.503159] env[61978]: DEBUG nova.virt.hardware [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1142.503315] env[61978]: DEBUG nova.virt.hardware [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1142.503530] env[61978]: DEBUG nova.virt.hardware [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1142.503709] env[61978]: DEBUG nova.virt.hardware [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1142.503901] env[61978]: DEBUG nova.virt.hardware [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1142.504086] env[61978]: DEBUG nova.virt.hardware [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1142.504268] env[61978]: DEBUG nova.virt.hardware [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1142.505113] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeeb9289-16f0-4651-bdc9-2a6783449a98 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.512462] env[61978]: DEBUG oslo_vmware.api [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395467, 'name': PowerOnVM_Task, 'duration_secs': 0.479587} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.513229] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1142.513442] env[61978]: INFO nova.compute.manager [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Took 7.12 seconds to spawn the instance on the hypervisor. 
[ 1142.513630] env[61978]: DEBUG nova.compute.manager [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1142.514410] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7971b4e1-c875-40f1-96e4-a93142e313b7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.520367] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce00888-cf9e-4f9f-80a3-99b417520032 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.600692] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395468, 'name': ReconfigVM_Task, 'duration_secs': 0.191743} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.601048] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1142.601324] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7eaf3c6-645a-4b92-b8d7-81509c44b0d3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.609441] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1142.609441] env[61978]: value = "task-1395469" [ 1142.609441] env[61978]: _type = "Task" [ 1142.609441] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.621031] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395469, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.646239] env[61978]: DEBUG nova.compute.manager [req-65da5e17-9537-432e-ab5e-2429ada6af5f req-4f86ef2e-423d-4b81-94d2-0e5fcfd7697d service nova] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Received event network-vif-plugged-25e2db6a-d281-4865-8d9b-8ae12370c2b9 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1142.646239] env[61978]: DEBUG oslo_concurrency.lockutils [req-65da5e17-9537-432e-ab5e-2429ada6af5f req-4f86ef2e-423d-4b81-94d2-0e5fcfd7697d service nova] Acquiring lock "94665d8c-df88-4ad0-bb90-547ace2d6345-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.646723] env[61978]: DEBUG oslo_concurrency.lockutils [req-65da5e17-9537-432e-ab5e-2429ada6af5f req-4f86ef2e-423d-4b81-94d2-0e5fcfd7697d service nova] Lock "94665d8c-df88-4ad0-bb90-547ace2d6345-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1142.647295] env[61978]: DEBUG oslo_concurrency.lockutils [req-65da5e17-9537-432e-ab5e-2429ada6af5f req-4f86ef2e-423d-4b81-94d2-0e5fcfd7697d service nova] Lock "94665d8c-df88-4ad0-bb90-547ace2d6345-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.648279] env[61978]: DEBUG nova.compute.manager [req-65da5e17-9537-432e-ab5e-2429ada6af5f req-4f86ef2e-423d-4b81-94d2-0e5fcfd7697d service nova] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] No waiting events found dispatching network-vif-plugged-25e2db6a-d281-4865-8d9b-8ae12370c2b9 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1142.648530] env[61978]: WARNING nova.compute.manager [req-65da5e17-9537-432e-ab5e-2429ada6af5f req-4f86ef2e-423d-4b81-94d2-0e5fcfd7697d service nova] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Received unexpected event network-vif-plugged-25e2db6a-d281-4865-8d9b-8ae12370c2b9 for instance with vm_state building and task_state spawning. [ 1142.648905] env[61978]: DEBUG nova.compute.manager [req-65da5e17-9537-432e-ab5e-2429ada6af5f req-4f86ef2e-423d-4b81-94d2-0e5fcfd7697d service nova] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Received event network-changed-25e2db6a-d281-4865-8d9b-8ae12370c2b9 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1142.650102] env[61978]: DEBUG nova.compute.manager [req-65da5e17-9537-432e-ab5e-2429ada6af5f req-4f86ef2e-423d-4b81-94d2-0e5fcfd7697d service nova] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Refreshing instance network info cache due to event network-changed-25e2db6a-d281-4865-8d9b-8ae12370c2b9. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1142.650102] env[61978]: DEBUG oslo_concurrency.lockutils [req-65da5e17-9537-432e-ab5e-2429ada6af5f req-4f86ef2e-423d-4b81-94d2-0e5fcfd7697d service nova] Acquiring lock "refresh_cache-94665d8c-df88-4ad0-bb90-547ace2d6345" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1142.717837] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Releasing lock "refresh_cache-94665d8c-df88-4ad0-bb90-547ace2d6345" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1142.717837] env[61978]: DEBUG nova.compute.manager [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Instance network_info: |[{"id": "25e2db6a-d281-4865-8d9b-8ae12370c2b9", "address": "fa:16:3e:64:7b:8d", "network": {"id": "61175c4b-0b5d-4186-97ac-7a615b95ef28", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-341600036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d4d29d9b6a74b4887684c7b310280b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25e2db6a-d2", "ovs_interfaceid": "25e2db6a-d281-4865-8d9b-8ae12370c2b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1142.717837] env[61978]: DEBUG oslo_concurrency.lockutils [req-65da5e17-9537-432e-ab5e-2429ada6af5f req-4f86ef2e-423d-4b81-94d2-0e5fcfd7697d service nova] Acquired lock "refresh_cache-94665d8c-df88-4ad0-bb90-547ace2d6345" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.717837] env[61978]: DEBUG nova.network.neutron [req-65da5e17-9537-432e-ab5e-2429ada6af5f req-4f86ef2e-423d-4b81-94d2-0e5fcfd7697d service nova] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Refreshing network info cache for port 25e2db6a-d281-4865-8d9b-8ae12370c2b9 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1142.717837] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:7b:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd5970ab5-34b8-4065-bfa6-f568b8f103b7', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '25e2db6a-d281-4865-8d9b-8ae12370c2b9', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1142.727973] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Creating folder: Project (2d4d29d9b6a74b4887684c7b310280b7). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1142.730222] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3c8a0b1-8c7e-492d-91c2-9b95039e33e2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.744951] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Created folder: Project (2d4d29d9b6a74b4887684c7b310280b7) in parent group-v295764. [ 1142.746135] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Creating folder: Instances. Parent ref: group-v295964. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1142.746842] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a0e429ec-a983-4e0f-8eaf-3d878c48c84d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.759729] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Created folder: Instances in parent group-v295964. [ 1142.759729] env[61978]: DEBUG oslo.service.loopingcall [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1142.760405] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1142.760966] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8b13108-0033-4f35-a74d-5b28e5925ec9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.783811] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1142.783811] env[61978]: value = "task-1395472" [ 1142.783811] env[61978]: _type = "Task" [ 1142.783811] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.795478] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395472, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.869827] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.438s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.870393] env[61978]: DEBUG nova.compute.manager [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1142.873303] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.726s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1142.873561] env[61978]: DEBUG nova.objects.instance [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Lazy-loading 'resources' on Instance uuid 4c7053ee-7c44-49ee-8d30-bf14686c6b1c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1142.981656] env[61978]: DEBUG nova.network.neutron [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Successfully updated port: 63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1143.011323] env[61978]: DEBUG nova.compute.manager [req-98bfe80d-63e3-4873-b071-f7f3cd89ea72 req-616d7c18-919e-44d5-88cc-bd1b01c2cc38 service nova] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Received event network-vif-deleted-940a682a-ece9-493f-a4e0-56b30bf8bba7 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1143.011399] env[61978]: INFO nova.compute.manager [req-98bfe80d-63e3-4873-b071-f7f3cd89ea72 req-616d7c18-919e-44d5-88cc-bd1b01c2cc38 service nova] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Neutron deleted interface 940a682a-ece9-493f-a4e0-56b30bf8bba7; detaching it from the instance and deleting it from the info cache [ 1143.011572] env[61978]: DEBUG nova.network.neutron [req-98bfe80d-63e3-4873-b071-f7f3cd89ea72 req-616d7c18-919e-44d5-88cc-bd1b01c2cc38 service nova] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.035213] env[61978]: INFO nova.compute.manager [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Took 21.86 seconds to build instance. 
[ 1143.119934] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395469, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.123638] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d0c8e2-178d-4d66-8d7b-45fe303edfb7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.130977] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f338918c-0a26-4bf8-b3f5-47e775f92231 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Doing hard reboot of VM {{(pid=61978) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1143.131281] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-06f7ab1c-44a8-413f-b0fb-7bbbcee2ac18 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.138897] env[61978]: DEBUG oslo_vmware.api [None req-f338918c-0a26-4bf8-b3f5-47e775f92231 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1143.138897] env[61978]: value = "task-1395473" [ 1143.138897] env[61978]: _type = "Task" [ 1143.138897] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.148319] env[61978]: DEBUG oslo_vmware.api [None req-f338918c-0a26-4bf8-b3f5-47e775f92231 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395473, 'name': ResetVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.263032] env[61978]: DEBUG nova.network.neutron [-] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.303882] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395472, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.378531] env[61978]: DEBUG nova.compute.utils [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1143.385017] env[61978]: DEBUG nova.compute.manager [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1143.387679] env[61978]: DEBUG nova.network.neutron [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1143.484757] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Acquiring lock "refresh_cache-1eae10e8-58b1-435d-86fc-0674725ce6cd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1143.484935] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Acquired lock "refresh_cache-1eae10e8-58b1-435d-86fc-0674725ce6cd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.485136] env[61978]: DEBUG nova.network.neutron [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1143.512328] env[61978]: DEBUG nova.policy [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd3571cd641ba4e629e109eaca11ecb78', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ad349fcfb78f46f0be51dfa32f635c59', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1143.517461] env[61978]: DEBUG nova.network.neutron [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Successfully updated port: 3ddcad7d-4ce0-48f2-adea-99d0e1cd2a04 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1143.520295] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-80d5a80c-4cf8-431d-963f-91866b5c4168 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.536693] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef38ba37-6559-4e2b-9cc8-8d3c554ea25a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.548680] env[61978]: DEBUG oslo_concurrency.lockutils [None req-49e997c8-dd9b-425c-99e5-e0b2432ed020 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.395s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.552049] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 7.464s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.552348] env[61978]: INFO nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] During sync_power_state the instance has a pending task (spawning). Skip. [ 1143.552545] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.560351] env[61978]: DEBUG nova.network.neutron [req-65da5e17-9537-432e-ab5e-2429ada6af5f req-4f86ef2e-423d-4b81-94d2-0e5fcfd7697d service nova] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Updated VIF entry in instance network info cache for port 25e2db6a-d281-4865-8d9b-8ae12370c2b9. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1143.560711] env[61978]: DEBUG nova.network.neutron [req-65da5e17-9537-432e-ab5e-2429ada6af5f req-4f86ef2e-423d-4b81-94d2-0e5fcfd7697d service nova] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Updating instance_info_cache with network_info: [{"id": "25e2db6a-d281-4865-8d9b-8ae12370c2b9", "address": "fa:16:3e:64:7b:8d", "network": {"id": "61175c4b-0b5d-4186-97ac-7a615b95ef28", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-341600036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d4d29d9b6a74b4887684c7b310280b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25e2db6a-d2", "ovs_interfaceid": "25e2db6a-d281-4865-8d9b-8ae12370c2b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.579499] env[61978]: DEBUG nova.compute.manager [req-98bfe80d-63e3-4873-b071-f7f3cd89ea72 req-616d7c18-919e-44d5-88cc-bd1b01c2cc38 service nova] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Detach interface failed, port_id=940a682a-ece9-493f-a4e0-56b30bf8bba7, reason: Instance 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d could not be found. 
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1143.620311] env[61978]: DEBUG oslo_vmware.api [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395469, 'name': PowerOnVM_Task, 'duration_secs': 0.708894} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.620589] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1143.623338] env[61978]: DEBUG nova.compute.manager [None req-c2efae75-8118-4008-b6dc-0e37978b2091 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1143.624135] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e005edb2-4a6c-4e7a-847b-7bac1a761ce0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.651963] env[61978]: DEBUG oslo_vmware.api [None req-f338918c-0a26-4bf8-b3f5-47e775f92231 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395473, 'name': ResetVM_Task, 'duration_secs': 0.104806} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.652332] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f338918c-0a26-4bf8-b3f5-47e775f92231 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Did hard reboot of VM {{(pid=61978) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1143.652531] env[61978]: DEBUG nova.compute.manager [None req-f338918c-0a26-4bf8-b3f5-47e775f92231 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1143.653333] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7222f936-40b4-41f0-a2ed-b1a4612b89b5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.765848] env[61978]: INFO nova.compute.manager [-] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Took 1.52 seconds to deallocate network for instance. 
[ 1143.772898] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73ec5e3-cb0a-4f76-871f-470235c39f37 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.781892] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0d9112-dc99-4e81-af5e-888d7b3f73ae {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.817140] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0043232-e980-4578-b1cc-29d88cc82b4c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.823150] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395472, 'name': CreateVM_Task, 'duration_secs': 0.545953} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.823794] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1143.824481] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1143.824648] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.824990] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1143.825629] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3116840-577a-44a0-af91-a019e5e6ae85 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.830664] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab571cc-ed10-4285-a1fe-fa02aba9a921 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.835944] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1143.835944] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a037a7-b1e0-82a9-35c3-e59dc7a09f6d" [ 1143.835944] env[61978]: _type = "Task" [ 1143.835944] env[61978]: } 
to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.853678] env[61978]: DEBUG nova.compute.provider_tree [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1143.860532] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a037a7-b1e0-82a9-35c3-e59dc7a09f6d, 'name': SearchDatastore_Task, 'duration_secs': 0.016743} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.860828] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1143.861075] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1143.861319] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1143.861533] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.861648] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1143.861903] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c5844638-0fb2-44e8-8016-a0fd16bd024c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.876665] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 
tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1143.876787] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1143.877497] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6a4e296-6883-459e-b283-2d00387520fa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.883042] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1143.883042] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]529aafa6-664d-f31c-6a5e-cd7dfa137ecf" [ 1143.883042] env[61978]: _type = "Task" [ 1143.883042] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.886127] env[61978]: DEBUG nova.compute.manager [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1143.893596] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]529aafa6-664d-f31c-6a5e-cd7dfa137ecf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.027450] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "refresh_cache-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1144.027450] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "refresh_cache-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.027450] env[61978]: DEBUG nova.network.neutron [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1144.029529] env[61978]: DEBUG nova.network.neutron [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1144.063182] env[61978]: DEBUG oslo_concurrency.lockutils [req-65da5e17-9537-432e-ab5e-2429ada6af5f req-4f86ef2e-423d-4b81-94d2-0e5fcfd7697d service nova] Releasing lock "refresh_cache-94665d8c-df88-4ad0-bb90-547ace2d6345" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1144.166823] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f338918c-0a26-4bf8-b3f5-47e775f92231 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.930s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1144.204653] env[61978]: DEBUG nova.network.neutron [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Successfully created port: 50f09356-baf0-487b-a1f9-4cdc359c1daf {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1144.225253] env[61978]: DEBUG nova.network.neutron [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Updating instance_info_cache with network_info: [{"id": "63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8", "address": "fa:16:3e:79:9a:37", "network": {"id": "74e44162-dbb7-4a19-b344-6133915c4ab5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-893790941-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a1c323dddcd42809d565f46ecf5e18f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63ba2eb7-45", "ovs_interfaceid": "63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.272228] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.357575] env[61978]: DEBUG nova.scheduler.client.report [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1144.398325] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]529aafa6-664d-f31c-6a5e-cd7dfa137ecf, 'name': SearchDatastore_Task, 'duration_secs': 0.027194} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.399404] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77782de2-754c-43dd-a9c4-af0c826234a0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.405160] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1144.405160] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ab6f67-73f3-bfcf-4888-59882c60b018" [ 1144.405160] env[61978]: _type = "Task" [ 1144.405160] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.414257] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ab6f67-73f3-bfcf-4888-59882c60b018, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.578981] env[61978]: WARNING nova.network.neutron [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] 3b4d30cc-d9a1-4180-b6eb-881241a1c0f4 already exists in list: networks containing: ['3b4d30cc-d9a1-4180-b6eb-881241a1c0f4']. ignoring it [ 1144.727725] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Releasing lock "refresh_cache-1eae10e8-58b1-435d-86fc-0674725ce6cd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1144.728087] env[61978]: DEBUG nova.compute.manager [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Instance network_info: |[{"id": "63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8", "address": "fa:16:3e:79:9a:37", "network": {"id": "74e44162-dbb7-4a19-b344-6133915c4ab5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-893790941-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a1c323dddcd42809d565f46ecf5e18f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63ba2eb7-45", "ovs_interfaceid": "63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1144.728535] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:9a:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abe48956-848a-4e1f-b1f1-a27baa5390b9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1144.736065] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Creating folder: Project (0a1c323dddcd42809d565f46ecf5e18f). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1144.739051] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-176207d4-3bcd-46ed-aedf-b866bb961d14 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.752074] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Created folder: Project (0a1c323dddcd42809d565f46ecf5e18f) in parent group-v295764. [ 1144.752313] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Creating folder: Instances. Parent ref: group-v295967. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1144.752686] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-102257a1-0c53-406c-99cf-6dd66b36557a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.766751] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Created folder: Instances in parent group-v295967. [ 1144.770014] env[61978]: DEBUG oslo.service.loopingcall [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1144.770014] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1144.770014] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c1706d50-4b52-4582-8a3f-b345d8450117 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.791350] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1144.791350] env[61978]: value = "task-1395476" [ 1144.791350] env[61978]: _type = "Task" [ 1144.791350] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.800646] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395476, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.865946] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.992s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1144.869051] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 8.733s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.869390] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1144.870081] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1144.870081] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.628s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.874028] env[61978]: INFO nova.compute.claims [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1144.874670] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a3fc356-23fe-4472-b43d-24dc15397c2a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.888539] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d85c3b-e29c-498f-8935-7191a1983900 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.895973] env[61978]: DEBUG nova.network.neutron [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Updating instance_info_cache with network_info: [{"id": "b04a501f-29a8-442a-9f2c-dddd76f5e335", "address": "fa:16:3e:62:14:fb", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb04a501f-29", "ovs_interfaceid": "b04a501f-29a8-442a-9f2c-dddd76f5e335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3ddcad7d-4ce0-48f2-adea-99d0e1cd2a04", "address": "fa:16:3e:58:7f:bf", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ddcad7d-4c", "ovs_interfaceid": "3ddcad7d-4ce0-48f2-adea-99d0e1cd2a04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.900264] env[61978]: DEBUG nova.compute.manager [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1144.915685] env[61978]: INFO nova.scheduler.client.report [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Deleted allocations for instance 4c7053ee-7c44-49ee-8d30-bf14686c6b1c [ 1144.920800] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3940b982-c93d-4b99-a8f7-3379ee1c5a10 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.928597] env[61978]: DEBUG nova.compute.manager [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Received event network-vif-plugged-63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1144.928841] env[61978]: DEBUG oslo_concurrency.lockutils [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] Acquiring lock "1eae10e8-58b1-435d-86fc-0674725ce6cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.929119] env[61978]: DEBUG oslo_concurrency.lockutils [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] Lock "1eae10e8-58b1-435d-86fc-0674725ce6cd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.929248] env[61978]: DEBUG oslo_concurrency.lockutils [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] Lock "1eae10e8-58b1-435d-86fc-0674725ce6cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1144.929419] env[61978]: DEBUG nova.compute.manager [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] No waiting events found dispatching network-vif-plugged-63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1144.929587] env[61978]: WARNING nova.compute.manager [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Received unexpected event network-vif-plugged-63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8 for instance with vm_state building and task_state spawning. 
[ 1144.929753] env[61978]: DEBUG nova.compute.manager [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Received event network-changed-63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1144.929913] env[61978]: DEBUG nova.compute.manager [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Refreshing instance network info cache due to event network-changed-63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1144.930114] env[61978]: DEBUG oslo_concurrency.lockutils [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] Acquiring lock "refresh_cache-1eae10e8-58b1-435d-86fc-0674725ce6cd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1144.930295] env[61978]: DEBUG oslo_concurrency.lockutils [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] Acquired lock "refresh_cache-1eae10e8-58b1-435d-86fc-0674725ce6cd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.930460] env[61978]: DEBUG nova.network.neutron [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Refreshing network info cache for port 63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1144.940094] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ab6f67-73f3-bfcf-4888-59882c60b018, 'name': SearchDatastore_Task, 'duration_secs': 0.017441} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.943893] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1144.944201] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 94665d8c-df88-4ad0-bb90-547ace2d6345/94665d8c-df88-4ad0-bb90-547ace2d6345.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1144.946525] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9ddaea0f-5d26-4e5f-8281-59fe29b60561 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.949910] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbefb4cc-a73f-499f-a6f1-89c29e6d64b4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.955439] env[61978]: DEBUG nova.virt.hardware [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1144.955652] env[61978]: DEBUG nova.virt.hardware [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1144.955841] env[61978]: DEBUG nova.virt.hardware [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1144.956085] env[61978]: DEBUG nova.virt.hardware [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1144.956248] env[61978]: DEBUG nova.virt.hardware [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1144.956460] env[61978]: DEBUG nova.virt.hardware [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1144.956689] env[61978]: DEBUG nova.virt.hardware [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1144.956890] env[61978]: DEBUG nova.virt.hardware [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1144.957123] env[61978]: DEBUG nova.virt.hardware [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1144.957311] env[61978]: DEBUG nova.virt.hardware [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1144.957508] env[61978]: DEBUG nova.virt.hardware [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1144.958896] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028e1632-0a51-4c72-b509-45d591385097 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.001034] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e730eb-a151-459f-adb0-dd55fd6f6f8a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.006586] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179433MB free_disk=185GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1145.006749] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1145.008976] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1145.008976] env[61978]: value = "task-1395477" [ 1145.008976] env[61978]: _type = "Task" [ 1145.008976] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.030957] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395477, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.144552] env[61978]: DEBUG nova.compute.manager [req-bcbe5c37-cc1f-46ea-9c2d-4240cff8841a req-b94f33a7-1fcf-4d82-b479-5458502fe75f service nova] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Received event network-changed-e0d58422-d319-4563-81b9-65c067c4b306 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1145.144836] env[61978]: DEBUG nova.compute.manager [req-bcbe5c37-cc1f-46ea-9c2d-4240cff8841a req-b94f33a7-1fcf-4d82-b479-5458502fe75f service nova] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Refreshing instance network info cache due to event network-changed-e0d58422-d319-4563-81b9-65c067c4b306. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1145.145136] env[61978]: DEBUG oslo_concurrency.lockutils [req-bcbe5c37-cc1f-46ea-9c2d-4240cff8841a req-b94f33a7-1fcf-4d82-b479-5458502fe75f service nova] Acquiring lock "refresh_cache-a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1145.145361] env[61978]: DEBUG oslo_concurrency.lockutils [req-bcbe5c37-cc1f-46ea-9c2d-4240cff8841a req-b94f33a7-1fcf-4d82-b479-5458502fe75f service nova] Acquired lock "refresh_cache-a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.145605] env[61978]: DEBUG nova.network.neutron [req-bcbe5c37-cc1f-46ea-9c2d-4240cff8841a req-b94f33a7-1fcf-4d82-b479-5458502fe75f service nova] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Refreshing network info cache for port e0d58422-d319-4563-81b9-65c067c4b306 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1145.302365] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395476, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.400031] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "refresh_cache-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1145.400321] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1145.400493] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.401382] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2420a1cc-9c9d-4471-9eae-96b4c6665328 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.420864] env[61978]: DEBUG nova.virt.hardware [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1145.421200] env[61978]: DEBUG nova.virt.hardware [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1145.421422] env[61978]: DEBUG nova.virt.hardware [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1145.421560] env[61978]: DEBUG nova.virt.hardware [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1145.421728] env[61978]: DEBUG nova.virt.hardware [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1145.421883] env[61978]: DEBUG nova.virt.hardware [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1145.422182] env[61978]: DEBUG nova.virt.hardware [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1145.422439] env[61978]: DEBUG nova.virt.hardware [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1145.422598] env[61978]: DEBUG nova.virt.hardware [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1145.422796] env[61978]: DEBUG nova.virt.hardware [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1145.423016] env[61978]: DEBUG nova.virt.hardware [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1145.429901] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Reconfiguring VM to attach interface {{(pid=61978) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1145.430718] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-006706fc-dba7-4558-8ded-e04a9312b921 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.447946] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d704b46b-e032-46ee-befa-94cd69729f1e tempest-ServersWithSpecificFlavorTestJSON-1877480738 tempest-ServersWithSpecificFlavorTestJSON-1877480738-project-member] Lock "4c7053ee-7c44-49ee-8d30-bf14686c6b1c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.420s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1145.449859] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "4c7053ee-7c44-49ee-8d30-bf14686c6b1c" acquired by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 9.365s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1145.450335] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-486c620a-7a9d-426b-845c-dd417bfb8646 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.457342] env[61978]: DEBUG oslo_vmware.api [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1145.457342] env[61978]: value = "task-1395478" [ 1145.457342] env[61978]: _type = "Task" [ 1145.457342] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.467500] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aafcaf5f-f51f-41c8-8379-525f2232d0ec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.483502] env[61978]: DEBUG oslo_vmware.api [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395478, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.523376] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395477, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.556215] env[61978]: INFO nova.compute.manager [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Unrescuing [ 1145.556546] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "refresh_cache-9ee04ee8-98ec-4be9-935d-cad7cd176466" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1145.556727] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquired lock "refresh_cache-9ee04ee8-98ec-4be9-935d-cad7cd176466" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.556904] env[61978]: DEBUG nova.network.neutron [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1145.750075] env[61978]: DEBUG nova.network.neutron [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Updated VIF entry in instance network info cache for port 63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1145.750479] env[61978]: DEBUG nova.network.neutron [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Updating instance_info_cache with network_info: [{"id": "63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8", "address": "fa:16:3e:79:9a:37", "network": {"id": "74e44162-dbb7-4a19-b344-6133915c4ab5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-893790941-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a1c323dddcd42809d565f46ecf5e18f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63ba2eb7-45", "ovs_interfaceid": "63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.803953] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395476, 'name': CreateVM_Task, 'duration_secs': 0.670788} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.804324] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1145.805175] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1145.805543] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.806057] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1145.807037] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ffafe43-f65c-4971-af1a-6a18d86c27af {{(pid=61978) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.813656] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Waiting for the task: (returnval){ [ 1145.813656] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c8b82b-92f2-b9b4-8b62-eca9c6954147" [ 1145.813656] env[61978]: _type = "Task" [ 1145.813656] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.824106] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c8b82b-92f2-b9b4-8b62-eca9c6954147, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.974640] env[61978]: DEBUG oslo_vmware.api [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395478, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.976288] env[61978]: DEBUG nova.network.neutron [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Successfully updated port: 50f09356-baf0-487b-a1f9-4cdc359c1daf {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1145.981993] env[61978]: DEBUG nova.network.neutron [req-bcbe5c37-cc1f-46ea-9c2d-4240cff8841a req-b94f33a7-1fcf-4d82-b479-5458502fe75f service nova] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Updated VIF entry in instance network info cache for port e0d58422-d319-4563-81b9-65c067c4b306. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1145.982366] env[61978]: DEBUG nova.network.neutron [req-bcbe5c37-cc1f-46ea-9c2d-4240cff8841a req-b94f33a7-1fcf-4d82-b479-5458502fe75f service nova] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Updating instance_info_cache with network_info: [{"id": "e0d58422-d319-4563-81b9-65c067c4b306", "address": "fa:16:3e:5c:7f:3b", "network": {"id": "4a12b89f-0910-460a-8694-c424a051b6c6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-917593776-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df79d3305e464a6b83f18497a2464140", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d58422-d3", "ovs_interfaceid": "e0d58422-d319-4563-81b9-65c067c4b306", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.018472] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "4c7053ee-7c44-49ee-8d30-bf14686c6b1c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.569s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.027470] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395477, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.572433} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.027880] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 94665d8c-df88-4ad0-bb90-547ace2d6345/94665d8c-df88-4ad0-bb90-547ace2d6345.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1146.028413] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1146.028713] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-280dd8bb-24f1-48e7-b243-01ce3f74ecd7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.038541] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1146.038541] env[61978]: value = "task-1395479" [ 1146.038541] env[61978]: _type = "Task" [ 1146.038541] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.048980] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395479, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.211010] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe1e78d-388a-4310-8e79-05b7522a36f2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.219835] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef21611c-6d4b-41d0-83a6-5d87c82c82ef {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.256354] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04498442-0c5f-4df9-bc80-69a04414262e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.259125] env[61978]: DEBUG oslo_concurrency.lockutils [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] Releasing lock "refresh_cache-1eae10e8-58b1-435d-86fc-0674725ce6cd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1146.259375] env[61978]: DEBUG nova.compute.manager [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Received event network-vif-plugged-3ddcad7d-4ce0-48f2-adea-99d0e1cd2a04 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1146.259569] env[61978]: DEBUG oslo_concurrency.lockutils [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] Acquiring lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.259775] env[61978]: DEBUG oslo_concurrency.lockutils [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] Lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.259940] env[61978]: DEBUG oslo_concurrency.lockutils [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] Lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.260122] env[61978]: DEBUG nova.compute.manager [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] No waiting events found dispatching network-vif-plugged-3ddcad7d-4ce0-48f2-adea-99d0e1cd2a04 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1146.260296] env[61978]: WARNING nova.compute.manager [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Received unexpected event network-vif-plugged-3ddcad7d-4ce0-48f2-adea-99d0e1cd2a04 for instance with vm_state active and task_state None. 
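The log entries around this point show the external-event flow: a Neutron `network-vif-plugged`/`network-changed` notification arrives at the compute manager, any registered waiter for that event is popped (with a WARNING when none exists), and the instance's cached network_info is refreshed under the per-instance `refresh_cache-<uuid>` lock. The following is a minimal, hypothetical sketch of that pattern; the class and method names are illustrative assumptions and not Nova's actual implementation.

```python
import threading
from collections import defaultdict


class InstanceEvents:
    """Tracks events that in-flight operations are waiting on (simplified)."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = defaultdict(dict)  # instance_uuid -> {event_name: Event}

    def prepare(self, instance_uuid, event_name):
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = waiter
        return waiter

    def pop(self, instance_uuid, event_name):
        with self._lock:
            return self._waiters[instance_uuid].pop(event_name, None)


class MiniComputeManager:
    """Illustrative handler for external instance events (not Nova code)."""

    def __init__(self, network_api, log):
        self.instance_events = InstanceEvents()
        self.network_api = network_api   # assumed to expose get_instance_nw_info()
        self.log = log
        self._cache_locks = defaultdict(threading.Lock)

    def external_instance_event(self, instance, event_name, port_id):
        if event_name.startswith("network-vif-plugged"):
            waiter = self.instance_events.pop(instance["uuid"], event_name)
            if waiter is None:
                # Mirrors the "Received unexpected event ..." WARNING above.
                self.log.warning("Received unexpected event %s for instance %s",
                                 event_name, instance["uuid"])
            else:
                waiter.set()
        elif event_name.startswith("network-changed"):
            # Refresh the cached network_info for the changed port while
            # holding a per-instance lock, as the refresh_cache lock
            # acquire/release pairs in the log do.
            with self._cache_locks[instance["uuid"]]:
                instance["network_info"] = self.network_api.get_instance_nw_info(
                    instance, refresh_port=port_id)
```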
[ 1146.260463] env[61978]: DEBUG nova.compute.manager [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Received event network-changed-3ddcad7d-4ce0-48f2-adea-99d0e1cd2a04 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1146.260804] env[61978]: DEBUG nova.compute.manager [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Refreshing instance network info cache due to event network-changed-3ddcad7d-4ce0-48f2-adea-99d0e1cd2a04. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1146.260804] env[61978]: DEBUG oslo_concurrency.lockutils [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] Acquiring lock "refresh_cache-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1146.260939] env[61978]: DEBUG oslo_concurrency.lockutils [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] Acquired lock "refresh_cache-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.261112] env[61978]: DEBUG nova.network.neutron [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Refreshing network info cache for port 3ddcad7d-4ce0-48f2-adea-99d0e1cd2a04 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1146.268758] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f71e3f-5a89-4ff5-93ee-503aef9c7a28 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.285342] env[61978]: DEBUG nova.compute.provider_tree [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1146.324386] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c8b82b-92f2-b9b4-8b62-eca9c6954147, 'name': SearchDatastore_Task, 'duration_secs': 0.046932} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.324747] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1146.325055] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1146.325344] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1146.325506] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.325689] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1146.325997] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b70f139-de91-4ce2-a2c2-e78904ec3cd6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.339991] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1146.340205] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1146.340951] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c6fc60d-97bc-4c66-a8e1-1217a3ccb0fe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.345635] env[61978]: DEBUG nova.network.neutron [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Updating instance_info_cache with network_info: [{"id": "fdf95a42-1379-4895-9a94-f8a8cf1d070d", "address": "fa:16:3e:0f:c0:2a", "network": {"id": "3e60c5f0-d590-4a22-a8bf-c0356ac4deb4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1865077723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86f4ae0b29af4ee2b33e5a499cf1e899", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdf95a42-13", "ovs_interfaceid": "fdf95a42-1379-4895-9a94-f8a8cf1d070d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.347967] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Waiting for the task: (returnval){ [ 1146.347967] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ecc643-47b0-5eae-39d5-ad4de635d8e2" [ 1146.347967] env[61978]: _type = "Task" [ 1146.347967] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.356405] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ecc643-47b0-5eae-39d5-ad4de635d8e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.470884] env[61978]: DEBUG oslo_vmware.api [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395478, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.481930] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Acquiring lock "refresh_cache-7d388d5c-2120-4dc5-a04f-5394e1e6f852" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1146.482107] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Acquired lock "refresh_cache-7d388d5c-2120-4dc5-a04f-5394e1e6f852" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.482474] env[61978]: DEBUG nova.network.neutron [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1146.485713] env[61978]: DEBUG oslo_concurrency.lockutils [req-bcbe5c37-cc1f-46ea-9c2d-4240cff8841a req-b94f33a7-1fcf-4d82-b479-5458502fe75f service nova] Releasing lock "refresh_cache-a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1146.549702] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395479, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.133711} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.550161] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1146.550868] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3dbc5bc-c121-4e51-bd23-2175b7548449 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.573401] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 94665d8c-df88-4ad0-bb90-547ace2d6345/94665d8c-df88-4ad0-bb90-547ace2d6345.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1146.573617] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f20837f7-3365-4202-a86e-df76de7d4ee9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.594670] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1146.594670] env[61978]: value = "task-1395480" [ 1146.594670] env[61978]: _type = "Task" [ 1146.594670] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.607132] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395480, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.788615] env[61978]: DEBUG nova.scheduler.client.report [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1146.850966] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Releasing lock "refresh_cache-9ee04ee8-98ec-4be9-935d-cad7cd176466" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1146.852042] env[61978]: DEBUG nova.objects.instance [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lazy-loading 'flavor' on Instance uuid 9ee04ee8-98ec-4be9-935d-cad7cd176466 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1146.865051] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ecc643-47b0-5eae-39d5-ad4de635d8e2, 'name': SearchDatastore_Task, 'duration_secs': 0.039126} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.865051] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcae19d8-5636-40d9-a0b8-54cf8ffbc426 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.874646] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Waiting for the task: (returnval){ [ 1146.874646] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52aaf392-a655-3653-2452-1241d5a1099e" [ 1146.874646] env[61978]: _type = "Task" [ 1146.874646] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.891290] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52aaf392-a655-3653-2452-1241d5a1099e, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.891732] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1146.892165] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 1eae10e8-58b1-435d-86fc-0674725ce6cd/1eae10e8-58b1-435d-86fc-0674725ce6cd.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1146.892546] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f5ad987c-0c3e-4a71-9a8a-7fad6223b189 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.901251] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Waiting for the task: (returnval){ [ 1146.901251] env[61978]: value = "task-1395481" [ 1146.901251] env[61978]: _type = "Task" [ 1146.901251] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.911823] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395481, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.972481] env[61978]: DEBUG oslo_vmware.api [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395478, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.983129] env[61978]: DEBUG nova.network.neutron [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Updated VIF entry in instance network info cache for port 3ddcad7d-4ce0-48f2-adea-99d0e1cd2a04. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1146.983627] env[61978]: DEBUG nova.network.neutron [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Updating instance_info_cache with network_info: [{"id": "b04a501f-29a8-442a-9f2c-dddd76f5e335", "address": "fa:16:3e:62:14:fb", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb04a501f-29", "ovs_interfaceid": "b04a501f-29a8-442a-9f2c-dddd76f5e335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3ddcad7d-4ce0-48f2-adea-99d0e1cd2a04", "address": "fa:16:3e:58:7f:bf", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ddcad7d-4c", "ovs_interfaceid": "3ddcad7d-4ce0-48f2-adea-99d0e1cd2a04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.015505] env[61978]: DEBUG nova.network.neutron [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1147.105384] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395480, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.169984] env[61978]: DEBUG nova.network.neutron [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Updating instance_info_cache with network_info: [{"id": "50f09356-baf0-487b-a1f9-4cdc359c1daf", "address": "fa:16:3e:21:8d:76", "network": {"id": "019b4bfd-7ed1-461e-bb1b-c82147395ef2", "bridge": "br-int", "label": "tempest-ServersTestJSON-566740792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad349fcfb78f46f0be51dfa32f635c59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50f09356-ba", "ovs_interfaceid": "50f09356-baf0-487b-a1f9-4cdc359c1daf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.293326] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.423s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1147.294112] env[61978]: DEBUG nova.compute.manager [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1147.299449] env[61978]: DEBUG nova.compute.manager [req-76e59fc7-f6dd-410d-a891-fc5eadfdcbac req-0601229e-4d85-42b3-8381-216a65b1b704 service nova] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Received event network-vif-plugged-50f09356-baf0-487b-a1f9-4cdc359c1daf {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1147.299449] env[61978]: DEBUG oslo_concurrency.lockutils [req-76e59fc7-f6dd-410d-a891-fc5eadfdcbac req-0601229e-4d85-42b3-8381-216a65b1b704 service nova] Acquiring lock "7d388d5c-2120-4dc5-a04f-5394e1e6f852-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1147.299449] env[61978]: DEBUG oslo_concurrency.lockutils [req-76e59fc7-f6dd-410d-a891-fc5eadfdcbac req-0601229e-4d85-42b3-8381-216a65b1b704 service nova] Lock "7d388d5c-2120-4dc5-a04f-5394e1e6f852-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1147.299449] env[61978]: DEBUG oslo_concurrency.lockutils [req-76e59fc7-f6dd-410d-a891-fc5eadfdcbac req-0601229e-4d85-42b3-8381-216a65b1b704 service nova] Lock "7d388d5c-2120-4dc5-a04f-5394e1e6f852-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1147.299449] env[61978]: DEBUG nova.compute.manager [req-76e59fc7-f6dd-410d-a891-fc5eadfdcbac req-0601229e-4d85-42b3-8381-216a65b1b704 service nova] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] No waiting events found dispatching network-vif-plugged-50f09356-baf0-487b-a1f9-4cdc359c1daf {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1147.299449] env[61978]: WARNING nova.compute.manager [req-76e59fc7-f6dd-410d-a891-fc5eadfdcbac req-0601229e-4d85-42b3-8381-216a65b1b704 service nova] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Received unexpected event network-vif-plugged-50f09356-baf0-487b-a1f9-4cdc359c1daf for instance with vm_state building and task_state spawning. [ 1147.299449] env[61978]: DEBUG nova.compute.manager [req-76e59fc7-f6dd-410d-a891-fc5eadfdcbac req-0601229e-4d85-42b3-8381-216a65b1b704 service nova] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Received event network-changed-50f09356-baf0-487b-a1f9-4cdc359c1daf {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1147.299449] env[61978]: DEBUG nova.compute.manager [req-76e59fc7-f6dd-410d-a891-fc5eadfdcbac req-0601229e-4d85-42b3-8381-216a65b1b704 service nova] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Refreshing instance network info cache due to event network-changed-50f09356-baf0-487b-a1f9-4cdc359c1daf. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1147.299449] env[61978]: DEBUG oslo_concurrency.lockutils [req-76e59fc7-f6dd-410d-a891-fc5eadfdcbac req-0601229e-4d85-42b3-8381-216a65b1b704 service nova] Acquiring lock "refresh_cache-7d388d5c-2120-4dc5-a04f-5394e1e6f852" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1147.299857] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.697s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1147.300038] env[61978]: DEBUG nova.objects.instance [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lazy-loading 'resources' on Instance uuid 0cdff646-34ad-49d5-b775-28e8e7ce778e {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1147.362896] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41069515-4c46-41b7-a7b0-6fb7cdb25daa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.394152] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1147.394376] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd955783-82bd-4279-8713-abf6cf6207cc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.404143] env[61978]: DEBUG oslo_vmware.api [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1147.404143] env[61978]: value = "task-1395482" [ 1147.404143] env[61978]: _type = "Task" [ 1147.404143] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.423439] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395481, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.423966] env[61978]: DEBUG oslo_vmware.api [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395482, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.475935] env[61978]: DEBUG oslo_vmware.api [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395478, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.488497] env[61978]: DEBUG oslo_concurrency.lockutils [req-08515b3d-e175-4b12-a1bf-a4dbf3472ae3 req-896b7f74-9d9c-4005-9897-4c692b8c563c service nova] Releasing lock "refresh_cache-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1147.608361] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395480, 'name': ReconfigVM_Task, 'duration_secs': 0.814481} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.608939] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 94665d8c-df88-4ad0-bb90-547ace2d6345/94665d8c-df88-4ad0-bb90-547ace2d6345.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1147.609882] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-380f9fef-e8b4-428a-9dd7-79dc0a0af3e8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.620536] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1147.620536] env[61978]: value = "task-1395483" [ 1147.620536] env[61978]: _type = "Task" [ 1147.620536] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.633214] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395483, 'name': Rename_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.676022] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Releasing lock "refresh_cache-7d388d5c-2120-4dc5-a04f-5394e1e6f852" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1147.676022] env[61978]: DEBUG nova.compute.manager [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Instance network_info: |[{"id": "50f09356-baf0-487b-a1f9-4cdc359c1daf", "address": "fa:16:3e:21:8d:76", "network": {"id": "019b4bfd-7ed1-461e-bb1b-c82147395ef2", "bridge": "br-int", "label": "tempest-ServersTestJSON-566740792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad349fcfb78f46f0be51dfa32f635c59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50f09356-ba", "ovs_interfaceid": "50f09356-baf0-487b-a1f9-4cdc359c1daf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1147.676022] env[61978]: DEBUG oslo_concurrency.lockutils [req-76e59fc7-f6dd-410d-a891-fc5eadfdcbac req-0601229e-4d85-42b3-8381-216a65b1b704 service nova] Acquired lock "refresh_cache-7d388d5c-2120-4dc5-a04f-5394e1e6f852" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.676022] env[61978]: DEBUG nova.network.neutron [req-76e59fc7-f6dd-410d-a891-fc5eadfdcbac req-0601229e-4d85-42b3-8381-216a65b1b704 service nova] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Refreshing network info cache for port 50f09356-baf0-487b-a1f9-4cdc359c1daf {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1147.677852] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:8d:76', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b356db78-99c7-4464-822c-fc7e193f7878', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '50f09356-baf0-487b-a1f9-4cdc359c1daf', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1147.690678] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Creating folder: Project 
(ad349fcfb78f46f0be51dfa32f635c59). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1147.692094] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba77f4a3-7768-4244-bd1d-1ff69cea9889 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.712402] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Created folder: Project (ad349fcfb78f46f0be51dfa32f635c59) in parent group-v295764. [ 1147.713060] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Creating folder: Instances. Parent ref: group-v295970. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1147.713510] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f7c52877-651f-43c5-b96c-265691e5f3c6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.727590] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Created folder: Instances in parent group-v295970. [ 1147.728164] env[61978]: DEBUG oslo.service.loopingcall [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1147.728391] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1147.728981] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa6b7995-2e56-4131-a7fa-b8c8228fb9fc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.753449] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1147.753449] env[61978]: value = "task-1395486" [ 1147.753449] env[61978]: _type = "Task" [ 1147.753449] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.764374] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395486, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.802084] env[61978]: DEBUG nova.compute.utils [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1147.806337] env[61978]: DEBUG nova.compute.manager [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1147.806716] env[61978]: DEBUG nova.network.neutron [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1147.873042] env[61978]: DEBUG nova.policy [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3f20b272502341bd80be470f98554d1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d95ebcafdca43b8a1636e21c7258803', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1147.914947] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395481, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.805161} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.918081] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 1eae10e8-58b1-435d-86fc-0674725ce6cd/1eae10e8-58b1-435d-86fc-0674725ce6cd.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1147.918350] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1147.918945] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3d2c0836-1865-4388-9a7d-bf794696d147 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.924125] env[61978]: DEBUG oslo_vmware.api [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395482, 'name': PowerOffVM_Task, 'duration_secs': 0.349672} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.925008] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1147.931115] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Reconfiguring VM instance instance-0000003a to detach disk 2002 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1147.935168] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7554fa34-464b-45ec-ba64-a48be678c19e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.949653] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Waiting for the task: (returnval){ [ 1147.949653] env[61978]: value = "task-1395487" [ 1147.949653] env[61978]: _type = "Task" [ 1147.949653] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.956354] env[61978]: DEBUG oslo_vmware.api [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1147.956354] env[61978]: value = "task-1395488" [ 1147.956354] env[61978]: _type = "Task" [ 1147.956354] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.966030] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395487, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.973346] env[61978]: DEBUG oslo_vmware.api [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395488, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.982167] env[61978]: DEBUG oslo_vmware.api [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395478, 'name': ReconfigVM_Task, 'duration_secs': 2.209473} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.982846] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1147.983284] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Reconfigured VM to attach interface {{(pid=61978) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1148.133480] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395483, 'name': Rename_Task, 'duration_secs': 0.272058} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.133480] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1148.133480] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc465799-49d9-46c7-9927-bc7ba90b47a7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.137140] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dea574e-47bc-4d41-a4d3-eb282c0b9dbd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.149035] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8366f2-685f-4728-b50a-4e8525f05e9b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.155442] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1148.155442] env[61978]: value = "task-1395489" [ 1148.155442] env[61978]: _type = "Task" [ 1148.155442] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.193565] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395489, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.195019] env[61978]: DEBUG nova.network.neutron [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Successfully created port: d5e0a55d-dd67-40cf-ad0c-76910a2013aa {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1148.198401] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ccab164-e816-4c96-a947-185f3b87c7fa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.208334] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ffbdfd-0120-4581-84d3-de067d408742 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.224839] env[61978]: DEBUG nova.compute.provider_tree [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1148.240468] env[61978]: DEBUG nova.compute.manager [None req-4fe06dad-c265-4691-ad03-85b135190de9 tempest-ServerDiagnosticsV248Test-290529957 tempest-ServerDiagnosticsV248Test-290529957-project-admin] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1148.242455] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc9a032-41f1-4734-95cf-cc9e68453062 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.255778] env[61978]: INFO nova.compute.manager [None req-4fe06dad-c265-4691-ad03-85b135190de9 tempest-ServerDiagnosticsV248Test-290529957 tempest-ServerDiagnosticsV248Test-290529957-project-admin] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Retrieving diagnostics [ 1148.259821] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d882f1a-e3e9-46c2-bd5e-8bc696828af5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.269380] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395486, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.310210] env[61978]: DEBUG nova.compute.manager [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1148.466185] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395487, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073885} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.467193] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1148.468487] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b4a749-b986-4a2f-a561-de4c6dc2ab53 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.478428] env[61978]: DEBUG oslo_vmware.api [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395488, 'name': ReconfigVM_Task, 'duration_secs': 0.262368} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.478428] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Reconfigured VM instance instance-0000003a to detach disk 2002 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1148.478569] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1148.478879] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ab006d82-4465-480e-9942-df5d5384f905 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.498397] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bf7a4e2b-213e-40ea-beb1-64fd59998222 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "interface-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.365s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1148.515117] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 1eae10e8-58b1-435d-86fc-0674725ce6cd/1eae10e8-58b1-435d-86fc-0674725ce6cd.vmdk or device None with type 
sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1148.520454] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-076de8e0-395b-4b7a-813f-c52c2cef7683 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.548946] env[61978]: DEBUG oslo_vmware.api [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1148.548946] env[61978]: value = "task-1395490" [ 1148.548946] env[61978]: _type = "Task" [ 1148.548946] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.557247] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Waiting for the task: (returnval){ [ 1148.557247] env[61978]: value = "task-1395491" [ 1148.557247] env[61978]: _type = "Task" [ 1148.557247] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.568532] env[61978]: DEBUG oslo_vmware.api [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395490, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.573488] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395491, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.609493] env[61978]: DEBUG nova.network.neutron [req-76e59fc7-f6dd-410d-a891-fc5eadfdcbac req-0601229e-4d85-42b3-8381-216a65b1b704 service nova] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Updated VIF entry in instance network info cache for port 50f09356-baf0-487b-a1f9-4cdc359c1daf. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1148.609885] env[61978]: DEBUG nova.network.neutron [req-76e59fc7-f6dd-410d-a891-fc5eadfdcbac req-0601229e-4d85-42b3-8381-216a65b1b704 service nova] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Updating instance_info_cache with network_info: [{"id": "50f09356-baf0-487b-a1f9-4cdc359c1daf", "address": "fa:16:3e:21:8d:76", "network": {"id": "019b4bfd-7ed1-461e-bb1b-c82147395ef2", "bridge": "br-int", "label": "tempest-ServersTestJSON-566740792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad349fcfb78f46f0be51dfa32f635c59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50f09356-ba", "ovs_interfaceid": "50f09356-baf0-487b-a1f9-4cdc359c1daf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.665746] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395489, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.758016] env[61978]: ERROR nova.scheduler.client.report [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] [req-f196f22b-cd1b-44f2-8372-6d275f943559] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 44209228-3464-48ae-bc40-83eccd44b0cf. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f196f22b-cd1b-44f2-8372-6d275f943559"}]} [ 1148.769829] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395486, 'name': CreateVM_Task, 'duration_secs': 0.547761} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.770039] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1148.771991] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1148.771991] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.771991] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1148.771991] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95eb27b9-938c-4ccd-a29e-9f313df3e2f2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.777486] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Waiting for the task: (returnval){ [ 1148.777486] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52effd9e-488b-cf9a-1437-460f3b8bb551" [ 1148.777486] env[61978]: _type = "Task" [ 1148.777486] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.779897] env[61978]: DEBUG nova.scheduler.client.report [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Refreshing inventories for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1148.789754] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52effd9e-488b-cf9a-1437-460f3b8bb551, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.795294] env[61978]: DEBUG nova.scheduler.client.report [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Updating ProviderTree inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1148.795527] env[61978]: DEBUG nova.compute.provider_tree [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1148.808737] env[61978]: DEBUG nova.scheduler.client.report [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Refreshing aggregate associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, aggregates: None {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1148.829278] env[61978]: DEBUG nova.scheduler.client.report [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Refreshing trait associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1149.062913] env[61978]: DEBUG oslo_vmware.api [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395490, 'name': PowerOnVM_Task, 'duration_secs': 0.393977} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.066413] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1149.066683] env[61978]: DEBUG nova.compute.manager [None req-e524b2bb-b943-48c2-9ea4-d409d6f166bd tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1149.067581] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7f41e9-8965-4084-bcc3-1481726e24fd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.079730] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395491, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.112400] env[61978]: DEBUG oslo_concurrency.lockutils [req-76e59fc7-f6dd-410d-a891-fc5eadfdcbac req-0601229e-4d85-42b3-8381-216a65b1b704 service nova] Releasing lock "refresh_cache-7d388d5c-2120-4dc5-a04f-5394e1e6f852" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1149.153292] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b2d96c-5832-4c02-a836-1d27a0d154b0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.165125] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5acb55d1-582e-42eb-88ee-36aa2a64970f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.172029] env[61978]: DEBUG oslo_vmware.api [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395489, 'name': PowerOnVM_Task, 'duration_secs': 0.883402} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.174157] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1149.174157] env[61978]: INFO nova.compute.manager [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Took 9.29 seconds to spawn the instance on the hypervisor. 
[ 1149.174157] env[61978]: DEBUG nova.compute.manager [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1149.174157] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b5e0b5-0fde-4a92-b165-952868c13de1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.207381] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b85827-011d-4aff-9da3-87fee71a9dc7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.221234] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2820dcf2-8ee2-4183-964b-275769be0347 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.235294] env[61978]: DEBUG nova.compute.provider_tree [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1149.292812] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52effd9e-488b-cf9a-1437-460f3b8bb551, 'name': SearchDatastore_Task, 'duration_secs': 0.017052} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.294609] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1149.294609] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1149.294609] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1149.294609] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.294609] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1149.294609] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-470fbff1-303a-4151-93ff-22ab2711cf77 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.308997] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1149.309159] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1149.309847] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf5dbe7d-2098-4a17-8a20-b41670cf5350 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.315489] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Waiting for the task: (returnval){ [ 1149.315489] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5253599a-26b3-ebba-1278-64026cc61b38" [ 1149.315489] env[61978]: _type = "Task" [ 1149.315489] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.326982] env[61978]: DEBUG nova.compute.manager [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1149.329204] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5253599a-26b3-ebba-1278-64026cc61b38, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.354252] env[61978]: DEBUG nova.virt.hardware [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1149.354683] env[61978]: DEBUG nova.virt.hardware [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1149.354683] env[61978]: DEBUG nova.virt.hardware [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1149.356068] env[61978]: DEBUG nova.virt.hardware [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 
tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1149.356068] env[61978]: DEBUG nova.virt.hardware [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1149.356068] env[61978]: DEBUG nova.virt.hardware [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1149.356068] env[61978]: DEBUG nova.virt.hardware [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1149.356068] env[61978]: DEBUG nova.virt.hardware [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1149.356068] env[61978]: DEBUG nova.virt.hardware [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1149.356337] env[61978]: DEBUG nova.virt.hardware [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1149.356507] env[61978]: DEBUG nova.virt.hardware [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1149.357458] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97447f5f-e564-4d37-a313-1726ac3915db {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.366224] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63dac9db-7040-4c2d-bb02-bdba61c807a5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.577188] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395491, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.729026] env[61978]: INFO nova.compute.manager [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Took 25.38 seconds to build instance. [ 1149.753602] env[61978]: DEBUG nova.compute.manager [req-40657cc6-d862-4969-8c8d-35b84144b311 req-735bcd3c-2055-410c-b851-a2b5b45946cd service nova] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Received event network-vif-plugged-d5e0a55d-dd67-40cf-ad0c-76910a2013aa {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1149.753602] env[61978]: DEBUG oslo_concurrency.lockutils [req-40657cc6-d862-4969-8c8d-35b84144b311 req-735bcd3c-2055-410c-b851-a2b5b45946cd service nova] Acquiring lock "81f0b79c-97b3-4a5d-a8fc-7c2250571177-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1149.753602] env[61978]: DEBUG oslo_concurrency.lockutils [req-40657cc6-d862-4969-8c8d-35b84144b311 req-735bcd3c-2055-410c-b851-a2b5b45946cd service nova] Lock "81f0b79c-97b3-4a5d-a8fc-7c2250571177-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1149.753602] env[61978]: DEBUG oslo_concurrency.lockutils [req-40657cc6-d862-4969-8c8d-35b84144b311 req-735bcd3c-2055-410c-b851-a2b5b45946cd service nova] Lock "81f0b79c-97b3-4a5d-a8fc-7c2250571177-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1149.753602] env[61978]: DEBUG nova.compute.manager [req-40657cc6-d862-4969-8c8d-35b84144b311 req-735bcd3c-2055-410c-b851-a2b5b45946cd service nova] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] No waiting events found dispatching network-vif-plugged-d5e0a55d-dd67-40cf-ad0c-76910a2013aa {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1149.755049] env[61978]: WARNING nova.compute.manager [req-40657cc6-d862-4969-8c8d-35b84144b311 req-735bcd3c-2055-410c-b851-a2b5b45946cd service nova] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Received unexpected event network-vif-plugged-d5e0a55d-dd67-40cf-ad0c-76910a2013aa for instance with vm_state building and task_state spawning. 
[ 1149.777034] env[61978]: DEBUG nova.scheduler.client.report [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Updated inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf with generation 100 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1149.777034] env[61978]: DEBUG nova.compute.provider_tree [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Updating resource provider 44209228-3464-48ae-bc40-83eccd44b0cf generation from 100 to 101 during operation: update_inventory {{(pid=61978) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1149.777034] env[61978]: DEBUG nova.compute.provider_tree [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1149.828265] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5253599a-26b3-ebba-1278-64026cc61b38, 'name': SearchDatastore_Task, 'duration_secs': 0.05427} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.829379] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b8d4366-250a-45a1-905d-eb4b961c0a11 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.839192] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Waiting for the task: (returnval){ [ 1149.839192] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5285ccad-0823-47cf-4885-9e809eca5b27" [ 1149.839192] env[61978]: _type = "Task" [ 1149.839192] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.848713] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5285ccad-0823-47cf-4885-9e809eca5b27, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.895905] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Acquiring lock "38e4f039-20bc-4bed-b449-227bde070ed9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1149.896372] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Lock "38e4f039-20bc-4bed-b449-227bde070ed9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1149.896372] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Acquiring lock "38e4f039-20bc-4bed-b449-227bde070ed9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1149.896952] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Lock "38e4f039-20bc-4bed-b449-227bde070ed9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1149.896952] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Lock "38e4f039-20bc-4bed-b449-227bde070ed9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1149.899727] env[61978]: INFO nova.compute.manager [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Terminating instance [ 1149.901547] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Acquiring lock "refresh_cache-38e4f039-20bc-4bed-b449-227bde070ed9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1149.901781] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Acquired lock "refresh_cache-38e4f039-20bc-4bed-b449-227bde070ed9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.902503] env[61978]: DEBUG nova.network.neutron [None 
req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1149.973864] env[61978]: DEBUG nova.network.neutron [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Successfully updated port: d5e0a55d-dd67-40cf-ad0c-76910a2013aa {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1150.083203] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395491, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.232359] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b16cbd3b-782d-4219-8342-f88054c2595c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "94665d8c-df88-4ad0-bb90-547ace2d6345" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.893s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.235206] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "94665d8c-df88-4ad0-bb90-547ace2d6345" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 14.145s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.235206] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55abc7c9-4f7c-45d7-b06e-1b380f4ee901 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.282276] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.982s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.286155] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.479s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.286431] env[61978]: DEBUG nova.objects.instance [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lazy-loading 'resources' on Instance uuid de0f46af-870a-4095-a417-913a2c51f66b {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1150.314283] env[61978]: INFO nova.scheduler.client.report [None req-8618762e-e79f-45c3-ad65-ee76239ce62b 
tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Deleted allocations for instance 0cdff646-34ad-49d5-b775-28e8e7ce778e [ 1150.351674] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5285ccad-0823-47cf-4885-9e809eca5b27, 'name': SearchDatastore_Task, 'duration_secs': 0.01225} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.351824] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1150.351996] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 7d388d5c-2120-4dc5-a04f-5394e1e6f852/7d388d5c-2120-4dc5-a04f-5394e1e6f852.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1150.352296] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07fa0c10-8722-4444-a95b-a41bdfdd3fe8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.355399] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "interface-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0-3ddcad7d-4ce0-48f2-adea-99d0e1cd2a04" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1150.355566] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "interface-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0-3ddcad7d-4ce0-48f2-adea-99d0e1cd2a04" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.364169] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Waiting for the task: (returnval){ [ 1150.364169] env[61978]: value = "task-1395492" [ 1150.364169] env[61978]: _type = "Task" [ 1150.364169] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.376408] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': task-1395492, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.428405] env[61978]: DEBUG nova.network.neutron [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1150.440874] env[61978]: DEBUG nova.compute.manager [req-245aa559-aa17-49b9-81f6-b09167954f26 req-dc22f211-3b3f-458e-b8a3-685d8e028dbc service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Received event network-changed-fdf95a42-1379-4895-9a94-f8a8cf1d070d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1150.440874] env[61978]: DEBUG nova.compute.manager [req-245aa559-aa17-49b9-81f6-b09167954f26 req-dc22f211-3b3f-458e-b8a3-685d8e028dbc service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Refreshing instance network info cache due to event network-changed-fdf95a42-1379-4895-9a94-f8a8cf1d070d. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1150.440874] env[61978]: DEBUG oslo_concurrency.lockutils [req-245aa559-aa17-49b9-81f6-b09167954f26 req-dc22f211-3b3f-458e-b8a3-685d8e028dbc service nova] Acquiring lock "refresh_cache-9ee04ee8-98ec-4be9-935d-cad7cd176466" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1150.441019] env[61978]: DEBUG oslo_concurrency.lockutils [req-245aa559-aa17-49b9-81f6-b09167954f26 req-dc22f211-3b3f-458e-b8a3-685d8e028dbc service nova] Acquired lock "refresh_cache-9ee04ee8-98ec-4be9-935d-cad7cd176466" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.442113] env[61978]: DEBUG nova.network.neutron [req-245aa559-aa17-49b9-81f6-b09167954f26 req-dc22f211-3b3f-458e-b8a3-685d8e028dbc service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Refreshing network info cache for port fdf95a42-1379-4895-9a94-f8a8cf1d070d {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1150.475651] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "refresh_cache-81f0b79c-97b3-4a5d-a8fc-7c2250571177" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1150.475813] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "refresh_cache-81f0b79c-97b3-4a5d-a8fc-7c2250571177" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.479102] env[61978]: DEBUG nova.network.neutron [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 
tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1150.510274] env[61978]: DEBUG nova.network.neutron [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.579605] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395491, 'name': ReconfigVM_Task, 'duration_secs': 1.550398} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.579913] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 1eae10e8-58b1-435d-86fc-0674725ce6cd/1eae10e8-58b1-435d-86fc-0674725ce6cd.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1150.583386] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fed46b70-0de4-46ce-9c60-968d1b541802 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.590127] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Waiting for the task: (returnval){ [ 1150.590127] env[61978]: value = "task-1395493" [ 1150.590127] env[61978]: _type = "Task" [ 1150.590127] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.599992] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395493, 'name': Rename_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.745409] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "94665d8c-df88-4ad0-bb90-547ace2d6345" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.512s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.826560] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8618762e-e79f-45c3-ad65-ee76239ce62b tempest-ServerRescueTestJSON-539208286 tempest-ServerRescueTestJSON-539208286-project-member] Lock "0cdff646-34ad-49d5-b775-28e8e7ce778e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.615s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.860317] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1150.860663] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.861732] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e3e230-c2ff-49a0-b441-94a5b5d71472 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.881268] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': task-1395492, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.899747] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04a6e48-c023-4fbb-a6b0-a5bd0c234786 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.929482] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Reconfiguring VM to detach interface {{(pid=61978) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1150.932976] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20841c0b-56f9-4dc0-a8db-6fe5c624e5a9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.957703] env[61978]: DEBUG oslo_vmware.api [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1150.957703] env[61978]: value = "task-1395494" [ 1150.957703] env[61978]: _type = "Task" [ 1150.957703] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.970974] env[61978]: DEBUG oslo_vmware.api [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395494, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.017493] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Releasing lock "refresh_cache-38e4f039-20bc-4bed-b449-227bde070ed9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1151.018175] env[61978]: DEBUG nova.compute.manager [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1151.018598] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1151.019671] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c24aec38-21ab-41a4-bde9-6af62758e5ab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.028970] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1151.029547] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-571f9b9e-60c7-458b-b7d2-48d99695a83a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.037230] env[61978]: DEBUG oslo_vmware.api [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Waiting for the task: (returnval){ [ 1151.037230] env[61978]: value = "task-1395495" [ 1151.037230] env[61978]: _type = "Task" [ 1151.037230] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.050195] env[61978]: DEBUG oslo_vmware.api [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Task: {'id': task-1395495, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.081281] env[61978]: DEBUG nova.network.neutron [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1151.100781] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395493, 'name': Rename_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.185156] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc1e6d51-f953-4ee9-8e80-f3a5c38a14aa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.193593] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-365c0752-c8fd-4d81-a077-a47971d23554 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.229176] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53e4c94-b644-489f-9163-293041ead5e9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.238578] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d5353e-ab45-42a0-a6dd-27afe54a3b86 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.253505] env[61978]: DEBUG nova.compute.provider_tree [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1151.314593] env[61978]: DEBUG nova.network.neutron [req-245aa559-aa17-49b9-81f6-b09167954f26 req-dc22f211-3b3f-458e-b8a3-685d8e028dbc service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Updated VIF entry in instance network info cache for port fdf95a42-1379-4895-9a94-f8a8cf1d070d. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1151.314869] env[61978]: DEBUG nova.network.neutron [req-245aa559-aa17-49b9-81f6-b09167954f26 req-dc22f211-3b3f-458e-b8a3-685d8e028dbc service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Updating instance_info_cache with network_info: [{"id": "fdf95a42-1379-4895-9a94-f8a8cf1d070d", "address": "fa:16:3e:0f:c0:2a", "network": {"id": "3e60c5f0-d590-4a22-a8bf-c0356ac4deb4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1865077723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86f4ae0b29af4ee2b33e5a499cf1e899", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdf95a42-13", "ovs_interfaceid": "fdf95a42-1379-4895-9a94-f8a8cf1d070d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1151.348367] env[61978]: DEBUG nova.network.neutron [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Updating instance_info_cache with network_info: [{"id": "d5e0a55d-dd67-40cf-ad0c-76910a2013aa", "address": "fa:16:3e:42:51:d5", "network": {"id": "69a878ea-ec7d-4793-9c48-f0f5d454a6fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1711420523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d95ebcafdca43b8a1636e21c7258803", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5e0a55d-dd", "ovs_interfaceid": "d5e0a55d-dd67-40cf-ad0c-76910a2013aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1151.382872] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': task-1395492, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.695911} completed 
successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.383220] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 7d388d5c-2120-4dc5-a04f-5394e1e6f852/7d388d5c-2120-4dc5-a04f-5394e1e6f852.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1151.383395] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1151.383658] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d2adfe17-c850-4117-909e-a8a91d31b5b7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.392482] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Waiting for the task: (returnval){ [ 1151.392482] env[61978]: value = "task-1395496" [ 1151.392482] env[61978]: _type = "Task" [ 1151.392482] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.407169] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': task-1395496, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.469474] env[61978]: DEBUG oslo_vmware.api [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395494, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.548814] env[61978]: DEBUG oslo_vmware.api [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Task: {'id': task-1395495, 'name': PowerOffVM_Task, 'duration_secs': 0.240778} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.549117] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1151.549299] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1151.549564] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83b3861e-f574-43fa-b1c8-7dc43a837456 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.581560] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1151.581788] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1151.582079] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Deleting the datastore file [datastore2] 38e4f039-20bc-4bed-b449-227bde070ed9 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1151.582289] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5f13bd8-76c1-40a1-a784-7337036b0959 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.591352] env[61978]: DEBUG oslo_vmware.api [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Waiting for the task: (returnval){ [ 1151.591352] env[61978]: value = "task-1395498" [ 1151.591352] env[61978]: _type = "Task" [ 1151.591352] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.607030] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395493, 'name': Rename_Task, 'duration_secs': 0.523905} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.613962] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1151.614331] env[61978]: DEBUG oslo_vmware.api [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Task: {'id': task-1395498, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.614550] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-30f92fed-0151-4c72-aec9-76ac3c3a9bab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.623149] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Waiting for the task: (returnval){ [ 1151.623149] env[61978]: value = "task-1395499" [ 1151.623149] env[61978]: _type = "Task" [ 1151.623149] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.632028] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395499, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.797242] env[61978]: DEBUG nova.scheduler.client.report [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Updated inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf with generation 101 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1151.797576] env[61978]: DEBUG nova.compute.provider_tree [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Updating resource provider 44209228-3464-48ae-bc40-83eccd44b0cf generation from 101 to 102 during operation: update_inventory {{(pid=61978) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1151.797772] env[61978]: DEBUG nova.compute.provider_tree [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1151.817639] env[61978]: DEBUG oslo_concurrency.lockutils [req-245aa559-aa17-49b9-81f6-b09167954f26 req-dc22f211-3b3f-458e-b8a3-685d8e028dbc service nova] Releasing lock "refresh_cache-9ee04ee8-98ec-4be9-935d-cad7cd176466" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1151.830135] env[61978]: DEBUG nova.compute.manager [req-d828a51e-1494-4706-8234-b38f56fd3eba req-a3b3d54e-6cdd-4cbf-94b9-9c1c37facfc6 service nova] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Received event network-changed-d5e0a55d-dd67-40cf-ad0c-76910a2013aa {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1151.830135] env[61978]: DEBUG nova.compute.manager [req-d828a51e-1494-4706-8234-b38f56fd3eba req-a3b3d54e-6cdd-4cbf-94b9-9c1c37facfc6 service nova] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Refreshing instance network info cache due to event network-changed-d5e0a55d-dd67-40cf-ad0c-76910a2013aa. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1151.830135] env[61978]: DEBUG oslo_concurrency.lockutils [req-d828a51e-1494-4706-8234-b38f56fd3eba req-a3b3d54e-6cdd-4cbf-94b9-9c1c37facfc6 service nova] Acquiring lock "refresh_cache-81f0b79c-97b3-4a5d-a8fc-7c2250571177" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1151.850701] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "refresh_cache-81f0b79c-97b3-4a5d-a8fc-7c2250571177" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1151.851022] env[61978]: DEBUG nova.compute.manager [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Instance network_info: |[{"id": "d5e0a55d-dd67-40cf-ad0c-76910a2013aa", "address": "fa:16:3e:42:51:d5", "network": {"id": "69a878ea-ec7d-4793-9c48-f0f5d454a6fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1711420523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d95ebcafdca43b8a1636e21c7258803", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5e0a55d-dd", "ovs_interfaceid": "d5e0a55d-dd67-40cf-ad0c-76910a2013aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1151.851426] env[61978]: DEBUG oslo_concurrency.lockutils [req-d828a51e-1494-4706-8234-b38f56fd3eba req-a3b3d54e-6cdd-4cbf-94b9-9c1c37facfc6 service nova] Acquired lock "refresh_cache-81f0b79c-97b3-4a5d-a8fc-7c2250571177" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.851706] env[61978]: DEBUG nova.network.neutron [req-d828a51e-1494-4706-8234-b38f56fd3eba req-a3b3d54e-6cdd-4cbf-94b9-9c1c37facfc6 service nova] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Refreshing network info cache for port d5e0a55d-dd67-40cf-ad0c-76910a2013aa {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1151.852967] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:51:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6def6dc5-d564-45ca-8f4f-7c820677e6e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'd5e0a55d-dd67-40cf-ad0c-76910a2013aa', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1151.864423] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Creating folder: Project (8d95ebcafdca43b8a1636e21c7258803). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1151.870861] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-061677ad-b940-4559-922a-322eba8b9b46 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.886540] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Created folder: Project (8d95ebcafdca43b8a1636e21c7258803) in parent group-v295764. [ 1151.886819] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Creating folder: Instances. Parent ref: group-v295973. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1151.887162] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b561fdb1-6d5d-419f-9e99-a22d87fd11e7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.905558] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Created folder: Instances in parent group-v295973. [ 1151.905824] env[61978]: DEBUG oslo.service.loopingcall [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1151.911502] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1151.911502] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dbb41a40-24a1-4da3-be17-e62d78774218 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.930447] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': task-1395496, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.252659} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.931351] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1151.932549] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb0d15d4-092b-4c01-a2c2-ea761e17b04d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.936950] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1151.936950] env[61978]: value = "task-1395502" [ 1151.936950] env[61978]: _type = "Task" [ 1151.936950] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.960254] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 7d388d5c-2120-4dc5-a04f-5394e1e6f852/7d388d5c-2120-4dc5-a04f-5394e1e6f852.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1151.961387] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb09378e-b39a-4fe8-b34c-8d9b51c04074 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.990744] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395502, 'name': CreateVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.001950] env[61978]: DEBUG oslo_vmware.api [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395494, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.003651] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Waiting for the task: (returnval){ [ 1152.003651] env[61978]: value = "task-1395503" [ 1152.003651] env[61978]: _type = "Task" [ 1152.003651] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.014752] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': task-1395503, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.102935] env[61978]: DEBUG oslo_vmware.api [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Task: {'id': task-1395498, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.285275} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.103277] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1152.103509] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1152.104406] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1152.104406] env[61978]: INFO nova.compute.manager [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1152.104406] env[61978]: DEBUG oslo.service.loopingcall [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1152.104559] env[61978]: DEBUG nova.compute.manager [-] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1152.104594] env[61978]: DEBUG nova.network.neutron [-] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1152.130805] env[61978]: DEBUG nova.network.neutron [-] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1152.137374] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395499, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.243595] env[61978]: DEBUG nova.network.neutron [req-d828a51e-1494-4706-8234-b38f56fd3eba req-a3b3d54e-6cdd-4cbf-94b9-9c1c37facfc6 service nova] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Updated VIF entry in instance network info cache for port d5e0a55d-dd67-40cf-ad0c-76910a2013aa. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1152.244319] env[61978]: DEBUG nova.network.neutron [req-d828a51e-1494-4706-8234-b38f56fd3eba req-a3b3d54e-6cdd-4cbf-94b9-9c1c37facfc6 service nova] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Updating instance_info_cache with network_info: [{"id": "d5e0a55d-dd67-40cf-ad0c-76910a2013aa", "address": "fa:16:3e:42:51:d5", "network": {"id": "69a878ea-ec7d-4793-9c48-f0f5d454a6fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1711420523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d95ebcafdca43b8a1636e21c7258803", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5e0a55d-dd", "ovs_interfaceid": "d5e0a55d-dd67-40cf-ad0c-76910a2013aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.303809] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.018s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.306252] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.373s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.308112] env[61978]: INFO nova.compute.claims [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1152.338973] env[61978]: INFO nova.scheduler.client.report [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Deleted allocations for instance de0f46af-870a-4095-a417-913a2c51f66b [ 1152.446878] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395502, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.473611] env[61978]: DEBUG oslo_vmware.api [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395494, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.491706] env[61978]: DEBUG nova.compute.manager [req-0ae4e802-a6b7-41b2-ac03-d79756375904 req-0e699bc1-2cf0-48c1-abcc-ae9269ce2038 service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Received event network-changed-fdf95a42-1379-4895-9a94-f8a8cf1d070d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1152.492016] env[61978]: DEBUG nova.compute.manager [req-0ae4e802-a6b7-41b2-ac03-d79756375904 req-0e699bc1-2cf0-48c1-abcc-ae9269ce2038 service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Refreshing instance network info cache due to event network-changed-fdf95a42-1379-4895-9a94-f8a8cf1d070d. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1152.492318] env[61978]: DEBUG oslo_concurrency.lockutils [req-0ae4e802-a6b7-41b2-ac03-d79756375904 req-0e699bc1-2cf0-48c1-abcc-ae9269ce2038 service nova] Acquiring lock "refresh_cache-9ee04ee8-98ec-4be9-935d-cad7cd176466" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1152.492527] env[61978]: DEBUG oslo_concurrency.lockutils [req-0ae4e802-a6b7-41b2-ac03-d79756375904 req-0e699bc1-2cf0-48c1-abcc-ae9269ce2038 service nova] Acquired lock "refresh_cache-9ee04ee8-98ec-4be9-935d-cad7cd176466" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.492750] env[61978]: DEBUG nova.network.neutron [req-0ae4e802-a6b7-41b2-ac03-d79756375904 req-0e699bc1-2cf0-48c1-abcc-ae9269ce2038 service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Refreshing network info cache for port fdf95a42-1379-4895-9a94-f8a8cf1d070d {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1152.515163] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': task-1395503, 'name': ReconfigVM_Task, 'duration_secs': 0.30171} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.518236] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 7d388d5c-2120-4dc5-a04f-5394e1e6f852/7d388d5c-2120-4dc5-a04f-5394e1e6f852.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1152.519742] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ba39a9f3-d0f8-432e-bf1d-426d53cb2820 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.527874] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Waiting for the task: (returnval){ [ 1152.527874] env[61978]: value = "task-1395504" [ 1152.527874] env[61978]: _type = "Task" [ 1152.527874] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.538370] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': task-1395504, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.636594] env[61978]: DEBUG nova.network.neutron [-] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.637855] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395499, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.747347] env[61978]: DEBUG oslo_concurrency.lockutils [req-d828a51e-1494-4706-8234-b38f56fd3eba req-a3b3d54e-6cdd-4cbf-94b9-9c1c37facfc6 service nova] Releasing lock "refresh_cache-81f0b79c-97b3-4a5d-a8fc-7c2250571177" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1152.853457] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8fa5d302-b0fb-41e4-93a6-e4003a21770f tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "de0f46af-870a-4095-a417-913a2c51f66b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.552s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.948347] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395502, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.972233] env[61978]: DEBUG oslo_vmware.api [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395494, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.021905] env[61978]: DEBUG nova.compute.manager [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1153.023469] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18866ab4-f613-48d9-9aa6-a02ce60e855f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.043089] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': task-1395504, 'name': Rename_Task, 'duration_secs': 0.219276} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.044214] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1153.044214] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-473f7264-522d-44bd-a39c-22c7c3ded256 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.051507] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Waiting for the task: (returnval){ [ 1153.051507] env[61978]: value = "task-1395505" [ 1153.051507] env[61978]: _type = "Task" [ 1153.051507] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.061030] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': task-1395505, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.140844] env[61978]: INFO nova.compute.manager [-] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Took 1.04 seconds to deallocate network for instance. [ 1153.141186] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395499, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.191724] env[61978]: DEBUG oslo_concurrency.lockutils [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "a1087abd-28d1-40ac-96ab-dc38392d027c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1153.192424] env[61978]: DEBUG oslo_concurrency.lockutils [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "a1087abd-28d1-40ac-96ab-dc38392d027c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.192780] env[61978]: DEBUG oslo_concurrency.lockutils [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "a1087abd-28d1-40ac-96ab-dc38392d027c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1153.192936] env[61978]: DEBUG oslo_concurrency.lockutils [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "a1087abd-28d1-40ac-96ab-dc38392d027c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.193152] env[61978]: DEBUG oslo_concurrency.lockutils [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "a1087abd-28d1-40ac-96ab-dc38392d027c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1153.196776] env[61978]: INFO nova.compute.manager [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Terminating instance [ 1153.199944] env[61978]: DEBUG nova.compute.manager [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1153.200234] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1153.201809] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa6c583b-2cbc-493b-b02c-2711b09ee037 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.213310] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1153.214049] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f9f5d3d-9594-4454-a477-806a8062e832 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.223496] env[61978]: DEBUG oslo_vmware.api [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1153.223496] env[61978]: value = "task-1395506" [ 1153.223496] env[61978]: _type = "Task" [ 1153.223496] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.235666] env[61978]: DEBUG oslo_vmware.api [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395506, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.452527] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395502, 'name': CreateVM_Task, 'duration_secs': 1.069934} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.452688] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1153.454068] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1153.454255] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.454587] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1153.458487] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73f08981-4231-4cbf-9782-57a4f95fd4fc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.465321] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1153.465321] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f82df3-2a12-7a62-4d2e-2395f9c3132e" [ 1153.465321] env[61978]: _type = "Task" [ 1153.465321] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.480935] env[61978]: DEBUG oslo_vmware.api [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395494, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.481849] env[61978]: DEBUG nova.network.neutron [req-0ae4e802-a6b7-41b2-ac03-d79756375904 req-0e699bc1-2cf0-48c1-abcc-ae9269ce2038 service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Updated VIF entry in instance network info cache for port fdf95a42-1379-4895-9a94-f8a8cf1d070d. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1153.482245] env[61978]: DEBUG nova.network.neutron [req-0ae4e802-a6b7-41b2-ac03-d79756375904 req-0e699bc1-2cf0-48c1-abcc-ae9269ce2038 service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Updating instance_info_cache with network_info: [{"id": "fdf95a42-1379-4895-9a94-f8a8cf1d070d", "address": "fa:16:3e:0f:c0:2a", "network": {"id": "3e60c5f0-d590-4a22-a8bf-c0356ac4deb4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1865077723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86f4ae0b29af4ee2b33e5a499cf1e899", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdf95a42-13", "ovs_interfaceid": "fdf95a42-1379-4895-9a94-f8a8cf1d070d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.486466] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f82df3-2a12-7a62-4d2e-2395f9c3132e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.538858] env[61978]: INFO nova.compute.manager [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] instance snapshotting [ 1153.541689] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb44c58-bac7-4395-ae7e-5156acbe9462 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.568137] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073b82e0-6071-4905-8c79-60b624817b95 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.580648] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': task-1395505, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.633303] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb57a9c5-9663-420b-a9fa-e1328b6fc03e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.638645] env[61978]: DEBUG oslo_vmware.api [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395499, 'name': PowerOnVM_Task, 'duration_secs': 1.565638} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.639331] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1153.639516] env[61978]: INFO nova.compute.manager [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Took 11.17 seconds to spawn the instance on the hypervisor. [ 1153.639708] env[61978]: DEBUG nova.compute.manager [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1153.640510] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28af8ed3-abd2-49d2-b1e9-059073847a13 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.645839] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e3804c-e3df-461f-86b9-3a5769fbe38b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.650812] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1153.684998] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5996cf34-9d28-4f19-a605-ca753e883d4a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.694111] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9675bcc4-1779-4c47-93d0-477b27374614 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.709155] env[61978]: DEBUG nova.compute.provider_tree [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] 
Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1153.734521] env[61978]: DEBUG oslo_vmware.api [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395506, 'name': PowerOffVM_Task, 'duration_secs': 0.362772} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.734791] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1153.735013] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1153.735298] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-40fc9ed7-ce26-4f03-ab5d-02af088e8d9e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.827424] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1153.827764] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1153.828069] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Deleting the datastore file [datastore2] a1087abd-28d1-40ac-96ab-dc38392d027c {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1153.828474] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-97622b90-94c3-4b48-b0f0-c963334f333b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.836509] env[61978]: DEBUG oslo_vmware.api [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1153.836509] env[61978]: value = "task-1395508" [ 1153.836509] env[61978]: _type = "Task" [ 1153.836509] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.848735] env[61978]: DEBUG oslo_vmware.api [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395508, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.927732] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "612aba6c-a30d-4eeb-8f85-e791bda55582" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1153.927964] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "612aba6c-a30d-4eeb-8f85-e791bda55582" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.976314] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f82df3-2a12-7a62-4d2e-2395f9c3132e, 'name': SearchDatastore_Task, 'duration_secs': 0.028061} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.980039] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1153.980039] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1153.980165] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1153.980310] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} 
[ 1153.980498] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1153.980773] env[61978]: DEBUG oslo_vmware.api [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395494, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.981010] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2fb9ce6-df06-40ed-8c03-8c0e7d410af3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.987765] env[61978]: DEBUG oslo_concurrency.lockutils [req-0ae4e802-a6b7-41b2-ac03-d79756375904 req-0e699bc1-2cf0-48c1-abcc-ae9269ce2038 service nova] Releasing lock "refresh_cache-9ee04ee8-98ec-4be9-935d-cad7cd176466" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1153.999150] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1153.999390] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1154.000266] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38ca9d37-2f6d-443f-a027-ce6f7c3f1b06 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.006756] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1154.006756] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ef455b-4632-7210-7cde-b940b2d7f7b4" [ 1154.006756] env[61978]: _type = "Task" [ 1154.006756] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.015938] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ef455b-4632-7210-7cde-b940b2d7f7b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.072042] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': task-1395505, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.082780] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Creating Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1154.083160] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e87f93fc-97b1-45b2-956b-94c59d6ee40b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.091486] env[61978]: DEBUG oslo_vmware.api [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1154.091486] env[61978]: value = "task-1395509" [ 1154.091486] env[61978]: _type = "Task" [ 1154.091486] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.101970] env[61978]: DEBUG oslo_vmware.api [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395509, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.162955] env[61978]: INFO nova.compute.manager [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Took 26.20 seconds to build instance. [ 1154.212392] env[61978]: DEBUG nova.scheduler.client.report [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1154.348624] env[61978]: DEBUG oslo_vmware.api [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395508, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.314351} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.349281] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1154.349281] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1154.349422] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1154.349522] env[61978]: INFO nova.compute.manager [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1154.349729] env[61978]: DEBUG oslo.service.loopingcall [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1154.350291] env[61978]: DEBUG nova.compute.manager [-] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1154.350291] env[61978]: DEBUG nova.network.neutron [-] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1154.430628] env[61978]: DEBUG nova.compute.manager [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1154.479881] env[61978]: DEBUG oslo_vmware.api [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395494, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.518780] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ef455b-4632-7210-7cde-b940b2d7f7b4, 'name': SearchDatastore_Task, 'duration_secs': 0.012457} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.520024] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-735df78e-62cf-4652-b8f4-c7f13d44cf13 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.526742] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1154.526742] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]525993aa-4086-f751-2196-73ed0558e6ff" [ 1154.526742] env[61978]: _type = "Task" [ 1154.526742] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.537255] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]525993aa-4086-f751-2196-73ed0558e6ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.576340] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': task-1395505, 'name': PowerOnVM_Task} progress is 87%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.603043] env[61978]: DEBUG oslo_vmware.api [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395509, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.667616] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff55b80b-bbc5-4df9-863b-c664c46a7537 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Lock "1eae10e8-58b1-435d-86fc-0674725ce6cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.717s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1154.717019] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.411s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1154.717610] env[61978]: DEBUG nova.compute.manager [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1154.722390] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.692s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1154.722390] env[61978]: DEBUG nova.objects.instance [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lazy-loading 'resources' on Instance uuid 35a6d3ec-8688-43c2-93c4-b23033aaf280 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1154.813915] env[61978]: DEBUG nova.compute.manager [req-ca67f032-3f0d-4930-801c-ca1c3242ca72 req-5b967798-1d4e-4264-8aa4-1980f6c8faea service nova] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Received event network-vif-deleted-c09a5182-eea7-4874-aa47-480a81863dd3 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1154.814201] env[61978]: INFO nova.compute.manager [req-ca67f032-3f0d-4930-801c-ca1c3242ca72 req-5b967798-1d4e-4264-8aa4-1980f6c8faea service nova] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Neutron deleted interface c09a5182-eea7-4874-aa47-480a81863dd3; detaching it from the instance and deleting it from the info cache [ 1154.814389] env[61978]: DEBUG nova.network.neutron [req-ca67f032-3f0d-4930-801c-ca1c3242ca72 req-5b967798-1d4e-4264-8aa4-1980f6c8faea service nova] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.962940] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1154.975734] env[61978]: DEBUG oslo_vmware.api [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395494, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.041398] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]525993aa-4086-f751-2196-73ed0558e6ff, 'name': SearchDatastore_Task, 'duration_secs': 0.019874} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.041398] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1155.041398] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 81f0b79c-97b3-4a5d-a8fc-7c2250571177/81f0b79c-97b3-4a5d-a8fc-7c2250571177.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1155.041398] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9a383e45-1a0d-42e8-92cb-62a657139687 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.049055] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1155.049055] env[61978]: value = "task-1395510" [ 1155.049055] env[61978]: _type = "Task" [ 1155.049055] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.057557] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395510, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.074316] env[61978]: DEBUG oslo_vmware.api [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': task-1395505, 'name': PowerOnVM_Task, 'duration_secs': 1.994783} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.077295] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1155.077295] env[61978]: INFO nova.compute.manager [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Took 10.17 seconds to spawn the instance on the hypervisor. 
[ 1155.077295] env[61978]: DEBUG nova.compute.manager [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1155.077295] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a162e637-3842-4851-9db9-9975728524f3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.103089] env[61978]: DEBUG oslo_vmware.api [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395509, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.224089] env[61978]: DEBUG nova.compute.utils [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1155.229024] env[61978]: DEBUG nova.compute.manager [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1155.229024] env[61978]: DEBUG nova.network.neutron [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1155.290753] env[61978]: DEBUG nova.network.neutron [-] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.319035] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8735ff28-3861-479d-9338-e98f7fc6c466 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.332038] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b565350-5e7f-4f66-af0d-afac144b8efd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.348407] env[61978]: DEBUG nova.policy [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7026a28592af41ebb4dd7df6cfa33feb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2af733ffc4384fa1a2c59f4a45f1778c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize 
/opt/stack/nova/nova/policy.py:201}} [ 1155.375553] env[61978]: DEBUG nova.compute.manager [req-ca67f032-3f0d-4930-801c-ca1c3242ca72 req-5b967798-1d4e-4264-8aa4-1980f6c8faea service nova] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Detach interface failed, port_id=c09a5182-eea7-4874-aa47-480a81863dd3, reason: Instance a1087abd-28d1-40ac-96ab-dc38392d027c could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1155.481175] env[61978]: DEBUG oslo_vmware.api [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395494, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.532199] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0167204-7e30-47df-b17b-2cc2ffa92d36 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.548275] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b9d434-1eb4-4da3-aaaa-729e995ca5bb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.559965] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395510, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.586219] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da55dff-6b94-4864-a635-ae1aac58d9bc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.598592] env[61978]: INFO nova.compute.manager [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Took 25.36 seconds to build instance. [ 1155.603409] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-284f239f-5094-4cbb-9791-feaae1909933 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.610583] env[61978]: DEBUG oslo_vmware.api [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395509, 'name': CreateSnapshot_Task, 'duration_secs': 1.029705} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.611233] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Created Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1155.612147] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2193e3-14cb-49f0-abd7-0763abe2e6cd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.622678] env[61978]: DEBUG nova.compute.provider_tree [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1155.728995] env[61978]: DEBUG nova.compute.manager [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1155.793062] env[61978]: INFO nova.compute.manager [-] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Took 1.44 seconds to deallocate network for instance. [ 1155.977620] env[61978]: DEBUG oslo_vmware.api [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395494, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.064343] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395510, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.074383] env[61978]: DEBUG nova.network.neutron [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Successfully created port: d2d39b09-4acd-4f24-aa07-31e86f78f134 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1156.103212] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b95db689-5d2d-4c7f-a35d-cf92ca6d78d9 tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Lock "7d388d5c-2120-4dc5-a04f-5394e1e6f852" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.874s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1156.129160] env[61978]: DEBUG nova.scheduler.client.report [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1156.145345] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Creating linked-clone VM from snapshot {{(pid=61978) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1156.147831] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2cbf20d5-9013-4c72-8b7a-31e2159839e6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.159803] env[61978]: DEBUG oslo_vmware.api [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1156.159803] env[61978]: value = "task-1395511" [ 1156.159803] env[61978]: _type = "Task" [ 1156.159803] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.172987] env[61978]: DEBUG oslo_vmware.api [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395511, 'name': CloneVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.300847] env[61978]: DEBUG oslo_concurrency.lockutils [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1156.482532] env[61978]: DEBUG oslo_vmware.api [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395494, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.565590] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395510, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.612413] env[61978]: DEBUG nova.compute.manager [req-277c711f-2d7e-49e1-ad42-251b6b80f485 req-185fa01a-f408-4a20-88a6-6aaa376ca03c service nova] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Received event network-changed-50f09356-baf0-487b-a1f9-4cdc359c1daf {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1156.612413] env[61978]: DEBUG nova.compute.manager [req-277c711f-2d7e-49e1-ad42-251b6b80f485 req-185fa01a-f408-4a20-88a6-6aaa376ca03c service nova] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Refreshing instance network info cache due to event network-changed-50f09356-baf0-487b-a1f9-4cdc359c1daf. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1156.612557] env[61978]: DEBUG oslo_concurrency.lockutils [req-277c711f-2d7e-49e1-ad42-251b6b80f485 req-185fa01a-f408-4a20-88a6-6aaa376ca03c service nova] Acquiring lock "refresh_cache-7d388d5c-2120-4dc5-a04f-5394e1e6f852" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1156.613259] env[61978]: DEBUG oslo_concurrency.lockutils [req-277c711f-2d7e-49e1-ad42-251b6b80f485 req-185fa01a-f408-4a20-88a6-6aaa376ca03c service nova] Acquired lock "refresh_cache-7d388d5c-2120-4dc5-a04f-5394e1e6f852" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.613259] env[61978]: DEBUG nova.network.neutron [req-277c711f-2d7e-49e1-ad42-251b6b80f485 req-185fa01a-f408-4a20-88a6-6aaa376ca03c service nova] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Refreshing network info cache for port 50f09356-baf0-487b-a1f9-4cdc359c1daf {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1156.634671] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.913s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1156.637463] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.366s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1156.637790] env[61978]: DEBUG nova.objects.instance [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lazy-loading 'resources' on Instance uuid 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1156.673976] env[61978]: DEBUG oslo_vmware.api [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395511, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.675191] env[61978]: INFO nova.scheduler.client.report [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Deleted allocations for instance 35a6d3ec-8688-43c2-93c4-b23033aaf280 [ 1156.739480] env[61978]: DEBUG nova.compute.manager [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1156.775034] env[61978]: DEBUG nova.virt.hardware [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1156.775034] env[61978]: DEBUG nova.virt.hardware [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1156.775034] env[61978]: DEBUG nova.virt.hardware [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1156.775034] env[61978]: DEBUG nova.virt.hardware [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1156.775034] env[61978]: DEBUG nova.virt.hardware [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1156.775034] env[61978]: DEBUG nova.virt.hardware [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1156.776257] env[61978]: DEBUG nova.virt.hardware [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1156.776737] env[61978]: DEBUG nova.virt.hardware [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1156.777276] env[61978]: DEBUG 
nova.virt.hardware [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1156.777623] env[61978]: DEBUG nova.virt.hardware [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1156.777947] env[61978]: DEBUG nova.virt.hardware [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1156.780334] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bbcad5e-3b83-43c6-9693-85d8a5c6c574 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.790884] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148749fc-17df-4e68-8033-b47568c55a2d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.840846] env[61978]: DEBUG nova.compute.manager [req-f4d68d14-662c-4d5f-80da-d42531ff8988 req-583d0705-6def-4810-9c40-3d507192e81f service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Received event network-changed-63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1156.841700] env[61978]: DEBUG nova.compute.manager [req-f4d68d14-662c-4d5f-80da-d42531ff8988 req-583d0705-6def-4810-9c40-3d507192e81f service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Refreshing instance network info cache due to event network-changed-63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1156.842117] env[61978]: DEBUG oslo_concurrency.lockutils [req-f4d68d14-662c-4d5f-80da-d42531ff8988 req-583d0705-6def-4810-9c40-3d507192e81f service nova] Acquiring lock "refresh_cache-1eae10e8-58b1-435d-86fc-0674725ce6cd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1156.842331] env[61978]: DEBUG oslo_concurrency.lockutils [req-f4d68d14-662c-4d5f-80da-d42531ff8988 req-583d0705-6def-4810-9c40-3d507192e81f service nova] Acquired lock "refresh_cache-1eae10e8-58b1-435d-86fc-0674725ce6cd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.842546] env[61978]: DEBUG nova.network.neutron [req-f4d68d14-662c-4d5f-80da-d42531ff8988 req-583d0705-6def-4810-9c40-3d507192e81f service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Refreshing network info cache for port 63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1156.979347] env[61978]: DEBUG oslo_vmware.api [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395494, 'name': ReconfigVM_Task, 'duration_secs': 5.849981} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.980201] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1156.980498] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Reconfigured VM to detach interface {{(pid=61978) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1157.066453] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395510, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.90061} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.066453] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 81f0b79c-97b3-4a5d-a8fc-7c2250571177/81f0b79c-97b3-4a5d-a8fc-7c2250571177.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1157.066897] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1157.066897] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a706e149-7d6a-4dd3-aee2-48285c4407b2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.076042] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1157.076042] env[61978]: value = "task-1395512" [ 1157.076042] env[61978]: _type = "Task" [ 1157.076042] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.086266] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395512, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.178370] env[61978]: DEBUG oslo_vmware.api [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395511, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.188619] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b14b7fca-c883-434b-995b-f214e676c282 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "35a6d3ec-8688-43c2-93c4-b23033aaf280" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.444s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1157.500694] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb5eb0c5-7682-4016-8f1d-aa2f3a3df2fb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.514474] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9b61bd-36e6-4561-9715-ba67a0646a9b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.544922] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d1b1eb6-76fc-4b3c-a120-bee6478eb2e4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.553912] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d90d4bc-2ff8-4aff-a58a-28ba78146e93 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.571578] env[61978]: DEBUG nova.compute.provider_tree [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1157.574690] env[61978]: DEBUG nova.network.neutron [req-277c711f-2d7e-49e1-ad42-251b6b80f485 req-185fa01a-f408-4a20-88a6-6aaa376ca03c service nova] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Updated VIF entry in instance network info cache for port 50f09356-baf0-487b-a1f9-4cdc359c1daf. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1157.575081] env[61978]: DEBUG nova.network.neutron [req-277c711f-2d7e-49e1-ad42-251b6b80f485 req-185fa01a-f408-4a20-88a6-6aaa376ca03c service nova] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Updating instance_info_cache with network_info: [{"id": "50f09356-baf0-487b-a1f9-4cdc359c1daf", "address": "fa:16:3e:21:8d:76", "network": {"id": "019b4bfd-7ed1-461e-bb1b-c82147395ef2", "bridge": "br-int", "label": "tempest-ServersTestJSON-566740792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad349fcfb78f46f0be51dfa32f635c59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b356db78-99c7-4464-822c-fc7e193f7878", "external-id": "nsx-vlan-transportzone-231", "segmentation_id": 231, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50f09356-ba", "ovs_interfaceid": "50f09356-baf0-487b-a1f9-4cdc359c1daf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.588674] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395512, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093124} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.588674] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1157.588674] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b09bb00-70f2-4ae0-add9-2e7b03f93739 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.614712] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 81f0b79c-97b3-4a5d-a8fc-7c2250571177/81f0b79c-97b3-4a5d-a8fc-7c2250571177.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1157.615017] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dfc4d231-e63e-47cc-9d19-a4e94ce2a731 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.638247] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1157.638247] env[61978]: value = "task-1395513" [ 1157.638247] env[61978]: _type = "Task" [ 1157.638247] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.654055] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395513, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.672489] env[61978]: DEBUG oslo_vmware.api [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395511, 'name': CloneVM_Task} progress is 95%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.725646] env[61978]: DEBUG nova.network.neutron [req-f4d68d14-662c-4d5f-80da-d42531ff8988 req-583d0705-6def-4810-9c40-3d507192e81f service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Updated VIF entry in instance network info cache for port 63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1157.726061] env[61978]: DEBUG nova.network.neutron [req-f4d68d14-662c-4d5f-80da-d42531ff8988 req-583d0705-6def-4810-9c40-3d507192e81f service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Updating instance_info_cache with network_info: [{"id": "63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8", "address": "fa:16:3e:79:9a:37", "network": {"id": "74e44162-dbb7-4a19-b344-6133915c4ab5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-893790941-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a1c323dddcd42809d565f46ecf5e18f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63ba2eb7-45", "ovs_interfaceid": "63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.802762] env[61978]: DEBUG nova.network.neutron [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Successfully updated port: d2d39b09-4acd-4f24-aa07-31e86f78f134 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1158.074769] env[61978]: DEBUG nova.scheduler.client.report [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1158.081376] env[61978]: DEBUG oslo_concurrency.lockutils [req-277c711f-2d7e-49e1-ad42-251b6b80f485 req-185fa01a-f408-4a20-88a6-6aaa376ca03c service nova] Releasing lock "refresh_cache-7d388d5c-2120-4dc5-a04f-5394e1e6f852" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1158.152177] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395513, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.172330] env[61978]: DEBUG oslo_vmware.api [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395511, 'name': CloneVM_Task, 'duration_secs': 1.794674} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.172607] env[61978]: INFO nova.virt.vmwareapi.vmops [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Created linked-clone VM from snapshot [ 1158.173419] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e197997f-1395-4f9e-8d1e-7624ba73e695 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.181515] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Uploading image 677e6f0a-1530-48d1-93bd-28a6cf05e9f6 {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1158.195204] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Destroying the VM {{(pid=61978) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1158.195565] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-afaf8d6d-b97e-4a62-bf53-f651c59523f5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.204727] env[61978]: DEBUG oslo_vmware.api [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1158.204727] env[61978]: value = "task-1395514" [ 1158.204727] env[61978]: _type = "Task" [ 1158.204727] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.214506] env[61978]: DEBUG oslo_vmware.api [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395514, 'name': Destroy_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.229966] env[61978]: DEBUG oslo_concurrency.lockutils [req-f4d68d14-662c-4d5f-80da-d42531ff8988 req-583d0705-6def-4810-9c40-3d507192e81f service nova] Releasing lock "refresh_cache-1eae10e8-58b1-435d-86fc-0674725ce6cd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1158.305176] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "refresh_cache-d3c82821-0617-4de6-8109-813a67910ed1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1158.305388] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired lock "refresh_cache-d3c82821-0617-4de6-8109-813a67910ed1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1158.305488] env[61978]: DEBUG nova.network.neutron [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1158.431668] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "refresh_cache-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1158.431935] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "refresh_cache-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1158.432192] env[61978]: DEBUG nova.network.neutron [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1158.470597] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.470872] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1158.471105] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.471316] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1158.471494] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1158.473962] env[61978]: INFO nova.compute.manager [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Terminating instance [ 1158.476136] env[61978]: DEBUG nova.compute.manager [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1158.476406] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1158.477297] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb185d7c-bf6f-423e-9e2b-116c12eb39f0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.486625] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1158.486937] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-df4e4e98-df97-4298-b94a-17667ffd4180 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.495504] env[61978]: DEBUG oslo_vmware.api [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1158.495504] env[61978]: value = "task-1395515" [ 1158.495504] env[61978]: _type = "Task" [ 1158.495504] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.504458] env[61978]: DEBUG oslo_vmware.api [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395515, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.579927] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.942s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1158.583323] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 13.576s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1158.609033] env[61978]: INFO nova.scheduler.client.report [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Deleted allocations for instance 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d [ 1158.641021] env[61978]: DEBUG nova.compute.manager [req-c453fecf-dd7c-46c8-8769-99b7679bedbd req-ca9bf941-6b08-42ca-9612-e374d49b08e5 service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Received event network-vif-deleted-3ddcad7d-4ce0-48f2-adea-99d0e1cd2a04 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1158.641388] env[61978]: INFO nova.compute.manager [req-c453fecf-dd7c-46c8-8769-99b7679bedbd req-ca9bf941-6b08-42ca-9612-e374d49b08e5 service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Neutron deleted interface 3ddcad7d-4ce0-48f2-adea-99d0e1cd2a04; detaching it from the instance and deleting it from the info cache [ 1158.641801] env[61978]: DEBUG nova.network.neutron [req-c453fecf-dd7c-46c8-8769-99b7679bedbd req-ca9bf941-6b08-42ca-9612-e374d49b08e5 service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Updating instance_info_cache with network_info: [{"id": "b04a501f-29a8-442a-9f2c-dddd76f5e335", "address": "fa:16:3e:62:14:fb", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb04a501f-29", "ovs_interfaceid": "b04a501f-29a8-442a-9f2c-dddd76f5e335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.654233] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 
tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395513, 'name': ReconfigVM_Task, 'duration_secs': 1.005279} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.654525] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 81f0b79c-97b3-4a5d-a8fc-7c2250571177/81f0b79c-97b3-4a5d-a8fc-7c2250571177.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1158.655745] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-de1b19ea-4ced-4682-828f-35f01f31eab4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.666062] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1158.666062] env[61978]: value = "task-1395516" [ 1158.666062] env[61978]: _type = "Task" [ 1158.666062] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.675595] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395516, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.715694] env[61978]: DEBUG oslo_vmware.api [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395514, 'name': Destroy_Task} progress is 33%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.837136] env[61978]: DEBUG nova.network.neutron [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1158.900709] env[61978]: DEBUG nova.compute.manager [req-bae7c740-413b-42ba-9c05-df0f88a23f85 req-d320817c-41cd-4930-979f-d0b35281a3eb service nova] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Received event network-vif-plugged-d2d39b09-4acd-4f24-aa07-31e86f78f134 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1158.900993] env[61978]: DEBUG oslo_concurrency.lockutils [req-bae7c740-413b-42ba-9c05-df0f88a23f85 req-d320817c-41cd-4930-979f-d0b35281a3eb service nova] Acquiring lock "d3c82821-0617-4de6-8109-813a67910ed1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.901271] env[61978]: DEBUG oslo_concurrency.lockutils [req-bae7c740-413b-42ba-9c05-df0f88a23f85 req-d320817c-41cd-4930-979f-d0b35281a3eb service nova] Lock "d3c82821-0617-4de6-8109-813a67910ed1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1158.901459] env[61978]: DEBUG oslo_concurrency.lockutils [req-bae7c740-413b-42ba-9c05-df0f88a23f85 req-d320817c-41cd-4930-979f-d0b35281a3eb service nova] Lock "d3c82821-0617-4de6-8109-813a67910ed1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1158.901639] env[61978]: DEBUG nova.compute.manager [req-bae7c740-413b-42ba-9c05-df0f88a23f85 req-d320817c-41cd-4930-979f-d0b35281a3eb service nova] [instance: d3c82821-0617-4de6-8109-813a67910ed1] No waiting events found dispatching network-vif-plugged-d2d39b09-4acd-4f24-aa07-31e86f78f134 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1158.901843] env[61978]: WARNING nova.compute.manager [req-bae7c740-413b-42ba-9c05-df0f88a23f85 req-d320817c-41cd-4930-979f-d0b35281a3eb service nova] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Received unexpected event network-vif-plugged-d2d39b09-4acd-4f24-aa07-31e86f78f134 for instance with vm_state building and task_state spawning. [ 1158.901999] env[61978]: DEBUG nova.compute.manager [req-bae7c740-413b-42ba-9c05-df0f88a23f85 req-d320817c-41cd-4930-979f-d0b35281a3eb service nova] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Received event network-changed-d2d39b09-4acd-4f24-aa07-31e86f78f134 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1158.902496] env[61978]: DEBUG nova.compute.manager [req-bae7c740-413b-42ba-9c05-df0f88a23f85 req-d320817c-41cd-4930-979f-d0b35281a3eb service nova] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Refreshing instance network info cache due to event network-changed-d2d39b09-4acd-4f24-aa07-31e86f78f134. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1158.902746] env[61978]: DEBUG oslo_concurrency.lockutils [req-bae7c740-413b-42ba-9c05-df0f88a23f85 req-d320817c-41cd-4930-979f-d0b35281a3eb service nova] Acquiring lock "refresh_cache-d3c82821-0617-4de6-8109-813a67910ed1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1159.008160] env[61978]: DEBUG oslo_vmware.api [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395515, 'name': PowerOffVM_Task, 'duration_secs': 0.345441} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.009899] env[61978]: DEBUG nova.network.neutron [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updating instance_info_cache with network_info: [{"id": "d2d39b09-4acd-4f24-aa07-31e86f78f134", "address": "fa:16:3e:3b:95:21", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2d39b09-4a", "ovs_interfaceid": "d2d39b09-4acd-4f24-aa07-31e86f78f134", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.011322] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1159.011551] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1159.015103] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50501e95-af08-418d-aefc-68859f3a10d0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.109204] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 
tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1159.109204] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1159.109512] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Deleting the datastore file [datastore2] 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1159.109549] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d013ec2e-5580-4520-9219-eab3c8f0b856 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.120268] env[61978]: DEBUG oslo_vmware.api [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1159.120268] env[61978]: value = "task-1395518" [ 1159.120268] env[61978]: _type = "Task" [ 1159.120268] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.120612] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4badffe4-0511-48be-8422-c47b6dea9a70 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.488s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.130598] env[61978]: DEBUG oslo_vmware.api [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395518, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.145603] env[61978]: DEBUG oslo_concurrency.lockutils [req-c453fecf-dd7c-46c8-8769-99b7679bedbd req-ca9bf941-6b08-42ca-9612-e374d49b08e5 service nova] Acquiring lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1159.177015] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395516, 'name': Rename_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.188172] env[61978]: INFO nova.network.neutron [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Port 3ddcad7d-4ce0-48f2-adea-99d0e1cd2a04 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1159.188549] env[61978]: DEBUG nova.network.neutron [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Updating instance_info_cache with network_info: [{"id": "b04a501f-29a8-442a-9f2c-dddd76f5e335", "address": "fa:16:3e:62:14:fb", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb04a501f-29", "ovs_interfaceid": "b04a501f-29a8-442a-9f2c-dddd76f5e335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.217106] env[61978]: DEBUG oslo_vmware.api [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395514, 'name': Destroy_Task} progress is 100%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.515816] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lock "refresh_cache-d3c82821-0617-4de6-8109-813a67910ed1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1159.516280] env[61978]: DEBUG nova.compute.manager [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Instance network_info: |[{"id": "d2d39b09-4acd-4f24-aa07-31e86f78f134", "address": "fa:16:3e:3b:95:21", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2d39b09-4a", "ovs_interfaceid": "d2d39b09-4acd-4f24-aa07-31e86f78f134", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1159.516661] env[61978]: DEBUG oslo_concurrency.lockutils [req-bae7c740-413b-42ba-9c05-df0f88a23f85 req-d320817c-41cd-4930-979f-d0b35281a3eb service nova] Acquired lock "refresh_cache-d3c82821-0617-4de6-8109-813a67910ed1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.516913] env[61978]: DEBUG nova.network.neutron [req-bae7c740-413b-42ba-9c05-df0f88a23f85 req-d320817c-41cd-4930-979f-d0b35281a3eb service nova] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Refreshing network info cache for port d2d39b09-4acd-4f24-aa07-31e86f78f134 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1159.518326] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:95:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac7039c0-3374-4c08-87fc-af2449b48b02', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd2d39b09-4acd-4f24-aa07-31e86f78f134', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1159.526872] env[61978]: DEBUG oslo.service.loopingcall [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 
tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1159.530235] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1159.531157] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b995f618-7bbf-4e32-aa79-b1dffaae2ee3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.555026] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1159.555026] env[61978]: value = "task-1395519" [ 1159.555026] env[61978]: _type = "Task" [ 1159.555026] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.565158] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395519, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.619258] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 243e7146-46fc-43f4-a83b-cdc58f397f9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.619473] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.619629] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.619732] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance bdfdd685-e440-4f53-b6c4-2ee2f06acba8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.619850] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.619965] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 9ee04ee8-98ec-4be9-935d-cad7cd176466 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.620126] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance a1087abd-28d1-40ac-96ab-dc38392d027c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1159.620243] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 17c56c1c-9992-4559-ad23-c68909ae6792 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.620355] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance ae6b92bb-6f79-4b52-bdb7-095985bf2fad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.620480] env[61978]: WARNING nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 38e4f039-20bc-4bed-b449-227bde070ed9 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1159.620976] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.620976] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 94665d8c-df88-4ad0-bb90-547ace2d6345 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.620976] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 1eae10e8-58b1-435d-86fc-0674725ce6cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.620976] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 7d388d5c-2120-4dc5-a04f-5394e1e6f852 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.621172] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 81f0b79c-97b3-4a5d-a8fc-7c2250571177 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.621172] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance d3c82821-0617-4de6-8109-813a67910ed1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1159.633480] env[61978]: DEBUG oslo_vmware.api [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395518, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.494799} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.634406] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1159.634606] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1159.634786] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1159.634965] env[61978]: INFO nova.compute.manager [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1159.635227] env[61978]: DEBUG oslo.service.loopingcall [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
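The two "Waiting for function … to return" records above (for nova.virt.vmwareapi.vm_util.create_vm and for _deallocate_network_with_retries) come from oslo.service's loopingcall machinery, which re-invokes a wrapped callable on transient failures and only hands control back once it returns. The sketch below reproduces that retry shape in plain Python; the decorator name, attempt count, sleep time, and the ConnectionError example are all illustrative and are not the oslo.service API.

import functools
import time


def retry_on(exceptions, max_attempts=3, sleep_time=1.0):
    """Re-invoke the wrapped function on the given exceptions, then re-raise."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            for attempt in range(1, max_attempts + 1):
                print("Waiting for function %s to return (attempt %d)"
                      % (func.__name__, attempt))
                try:
                    return func(*args, **kwargs)
                except exceptions:
                    if attempt == max_attempts:
                        raise
                    time.sleep(sleep_time)
        return wrapper
    return decorator


# Usage sketch: a deallocation step that fails once before succeeding.
_state = {"failed_once": False}


@retry_on((ConnectionError,), max_attempts=3, sleep_time=0.01)
def deallocate_network_with_retries():
    if not _state["failed_once"]:
        _state["failed_once"] = True
        raise ConnectionError("transient Neutron error")
    return "deallocated"


print(deallocate_network_with_retries())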
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1159.635431] env[61978]: DEBUG nova.compute.manager [-] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1159.635525] env[61978]: DEBUG nova.network.neutron [-] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1159.676672] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395516, 'name': Rename_Task, 'duration_secs': 0.57195} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.676918] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1159.677251] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac2b81cd-1d88-44c4-a281-382702314555 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.690015] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1159.690015] env[61978]: value = "task-1395520" [ 1159.690015] env[61978]: _type = "Task" [ 1159.690015] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.690552] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "refresh_cache-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1159.700976] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395520, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.716031] env[61978]: DEBUG oslo_vmware.api [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395514, 'name': Destroy_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.762261] env[61978]: DEBUG nova.network.neutron [req-bae7c740-413b-42ba-9c05-df0f88a23f85 req-d320817c-41cd-4930-979f-d0b35281a3eb service nova] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updated VIF entry in instance network info cache for port d2d39b09-4acd-4f24-aa07-31e86f78f134. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1159.762686] env[61978]: DEBUG nova.network.neutron [req-bae7c740-413b-42ba-9c05-df0f88a23f85 req-d320817c-41cd-4930-979f-d0b35281a3eb service nova] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updating instance_info_cache with network_info: [{"id": "d2d39b09-4acd-4f24-aa07-31e86f78f134", "address": "fa:16:3e:3b:95:21", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2d39b09-4a", "ovs_interfaceid": "d2d39b09-4acd-4f24-aa07-31e86f78f134", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.067173] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395519, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.127262] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 612aba6c-a30d-4eeb-8f85-e791bda55582 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
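The resource-tracker sweep above shows the three outcomes of reconciling this host's instances against their Placement allocations: instances actively managed here keep their allocations, an instance scheduled here but not yet started (612aba6c-…) is skipped, and allocations that reference this host for instances it is not managing (a1087abd-…, 38e4f039-…) produce the "we do not know what to do" warning. Below is a much-simplified illustration of that three-way decision; the function, its arguments, and the two sets are illustrative, with only the UUIDs and resource figures taken from the log.

def classify_allocation(instance_uuid, allocations, tracked_uuids, scheduled_uuids):
    """Return which of the three logged outcomes applies to one instance."""
    resources = allocations.get(instance_uuid)
    if resources is None:
        return "no allocation to heal"
    if instance_uuid in tracked_uuids:
        return "actively managed, allocation kept: %s" % resources
    if instance_uuid in scheduled_uuids:
        return "scheduled but not yet started, skipping heal"
    return "WARNING: not managed here but allocation exists, skipping heal"


allocations = {
    "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0": {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},
    "a1087abd-28d1-40ac-96ab-dc38392d027c": {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},
    "612aba6c-a30d-4eeb-8f85-e791bda55582": {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},
}
tracked = {"9bee3e66-93b5-4c0f-bb46-8fbd78c312c0"}
scheduled = {"612aba6c-a30d-4eeb-8f85-e791bda55582"}

for instance_uuid in allocations:
    print(instance_uuid[:8], "->",
          classify_allocation(instance_uuid, allocations, tracked, scheduled))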
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1160.127262] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1160.127262] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3200MB phys_disk=200GB used_disk=14GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1160.197498] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9c06413-44e1-43a8-b230-813626ed0c61 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "interface-9bee3e66-93b5-4c0f-bb46-8fbd78c312c0-3ddcad7d-4ce0-48f2-adea-99d0e1cd2a04" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.842s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1160.205496] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395520, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.215368] env[61978]: DEBUG oslo_vmware.api [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395514, 'name': Destroy_Task, 'duration_secs': 1.7385} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.215657] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Destroyed the VM [ 1160.215977] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Deleting Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1160.216270] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-47eb76ce-ff50-40d5-a740-df1b5bf5a0f7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.224338] env[61978]: DEBUG oslo_vmware.api [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1160.224338] env[61978]: value = "task-1395521" [ 1160.224338] env[61978]: _type = "Task" [ 1160.224338] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.233390] env[61978]: DEBUG oslo_vmware.api [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395521, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.272809] env[61978]: DEBUG oslo_concurrency.lockutils [req-bae7c740-413b-42ba-9c05-df0f88a23f85 req-d320817c-41cd-4930-979f-d0b35281a3eb service nova] Releasing lock "refresh_cache-d3c82821-0617-4de6-8109-813a67910ed1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1160.402615] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b07c73-b756-43c8-8910-b4b8d89dbbc0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.412734] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc80024-8bc1-48f2-8638-58cacb37f489 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.452939] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1160.453425] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1160.455855] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a549ba25-7ff7-49cd-9c57-88ef91065f70 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.465644] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48fef92b-02c7-49d3-9320-c75103fa1944 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.484085] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1160.572046] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395519, 'name': CreateVM_Task, 'duration_secs': 0.780152} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.572298] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1160.573685] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1160.573937] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.574356] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1160.575571] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7d34ffe-1c50-4a68-b1f7-94f7f56e8f1a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.583122] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1160.583122] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b09ce1-aa0d-9253-aabd-aec4cedb67a3" [ 1160.583122] env[61978]: _type = "Task" [ 1160.583122] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.593426] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b09ce1-aa0d-9253-aabd-aec4cedb67a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.702117] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395520, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.734581] env[61978]: DEBUG oslo_vmware.api [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395521, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.926263] env[61978]: DEBUG nova.compute.manager [req-187be967-4bab-4eb5-bfc3-061213d6ffd0 req-c0867dae-1478-497d-a62b-d33bc4e2a3d8 service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Received event network-vif-deleted-b04a501f-29a8-442a-9f2c-dddd76f5e335 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1160.926557] env[61978]: INFO nova.compute.manager [req-187be967-4bab-4eb5-bfc3-061213d6ffd0 req-c0867dae-1478-497d-a62b-d33bc4e2a3d8 service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Neutron deleted interface b04a501f-29a8-442a-9f2c-dddd76f5e335; detaching it from the instance and deleting it from the info cache [ 1160.926759] env[61978]: DEBUG nova.network.neutron [req-187be967-4bab-4eb5-bfc3-061213d6ffd0 req-c0867dae-1478-497d-a62b-d33bc4e2a3d8 service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.959580] env[61978]: DEBUG nova.compute.manager [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1160.990029] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1161.064280] env[61978]: DEBUG nova.network.neutron [-] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.095218] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b09ce1-aa0d-9253-aabd-aec4cedb67a3, 'name': SearchDatastore_Task, 'duration_secs': 0.014884} completed successfully. 
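The "Inventory has not changed" record above carries the inventory this node reports to Placement for provider 44209228-3464-48ae-bc40-83eccd44b0cf. The headroom Placement enforces per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation; applied to the figures above, the 48 physical VCPUs become 192 schedulable ones at the 4.0 ratio, which lines up with the "Total usable vcpus: 48, total allocated vcpus: 14" view a few records earlier. The helper below just evaluates that formula; its name is illustrative and the data is copied from the log.

def effective_capacity(inventory):
    """Capacity Placement allows per resource class: (total - reserved) * allocation_ratio."""
    return {
        rc: int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
        for rc, inv in inventory.items()
    }


# Inventory data copied from the report above.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1, "max_unit": 65530,
                  "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 185,
                "step_size": 1, "allocation_ratio": 1.0},
}

print(effective_capacity(inventory))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}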
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.096050] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1161.096050] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1161.096255] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1161.096597] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.096710] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1161.097218] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dcd7490f-cfea-491c-9bad-48a1ed67d672 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.108486] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1161.108735] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1161.109647] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc6fef59-ad02-4bc3-bc3d-ca6166376158 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.116926] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1161.116926] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5241e3a3-11e6-68d5-5c4f-938c89ca35c6" [ 1161.116926] env[61978]: _type = "Task" [ 1161.116926] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.126360] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5241e3a3-11e6-68d5-5c4f-938c89ca35c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.203378] env[61978]: DEBUG oslo_vmware.api [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395520, 'name': PowerOnVM_Task, 'duration_secs': 1.441125} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.203846] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1161.204167] env[61978]: INFO nova.compute.manager [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Took 11.88 seconds to spawn the instance on the hypervisor. [ 1161.204478] env[61978]: DEBUG nova.compute.manager [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1161.205619] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11048dd0-c6a6-4ad8-842c-b2d38b028d68 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.236404] env[61978]: DEBUG oslo_vmware.api [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395521, 'name': RemoveSnapshot_Task, 'duration_secs': 0.684151} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.236677] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Deleted Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1161.429585] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f3f599d-4503-4048-88d4-3d0c924da68a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.439775] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad156a0a-284d-4340-a6e8-e9b1d308cd4a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.475155] env[61978]: DEBUG nova.compute.manager [req-187be967-4bab-4eb5-bfc3-061213d6ffd0 req-c0867dae-1478-497d-a62b-d33bc4e2a3d8 service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Detach interface failed, port_id=b04a501f-29a8-442a-9f2c-dddd76f5e335, reason: Instance 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1161.493558] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1161.495576] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1161.495678] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.913s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1161.495957] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.845s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.496162] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1161.498134] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 
tempest-ImagesTestJSON-1872689461-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.535s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.499628] env[61978]: INFO nova.compute.claims [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1161.521348] env[61978]: INFO nova.scheduler.client.report [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Deleted allocations for instance 38e4f039-20bc-4bed-b449-227bde070ed9 [ 1161.567030] env[61978]: INFO nova.compute.manager [-] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Took 1.93 seconds to deallocate network for instance. [ 1161.630152] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5241e3a3-11e6-68d5-5c4f-938c89ca35c6, 'name': SearchDatastore_Task, 'duration_secs': 0.015432} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.630970] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89b1b24e-21bb-4306-9cef-8ae9d0abc555 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.637105] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1161.637105] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cbf1f0-efe9-7006-e873-efabb10a63a3" [ 1161.637105] env[61978]: _type = "Task" [ 1161.637105] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.645553] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cbf1f0-efe9-7006-e873-efabb10a63a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.727871] env[61978]: INFO nova.compute.manager [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Took 21.50 seconds to build instance. [ 1161.742565] env[61978]: WARNING nova.compute.manager [None req-826a6d49-a5a2-4967-b26e-02f38554cfa9 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Image not found during snapshot: nova.exception.ImageNotFound: Image 677e6f0a-1530-48d1-93bd-28a6cf05e9f6 could not be found. 
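The Lock "compute_resources" acquired "waited 6.535s" / "released" "held 2.913s" bookkeeping above is emitted by oslo.concurrency's lockutils wrapper: the resource tracker serializes instance claims and usage updates behind one named lock, so the waited figures measure time spent queueing behind other claims. A minimal usage sketch follows, assuming oslo.concurrency is installed; the function bodies, the sleep, and the example UUIDs (taken from nearby records) are illustrative.

import time

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    # Runs under the same process-local named lock the resource tracker
    # uses; concurrent callers queue here, which is where the "waited"
    # figures in the log come from.
    time.sleep(0.1)
    return "claimed %s" % instance_uuid


@lockutils.synchronized('compute_resources')
def update_usage(instance_uuid):
    return "usage updated for %s" % instance_uuid


if __name__ == "__main__":
    print(instance_claim("b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2"))
    print(update_usage("38e4f039-20bc-4bed-b449-227bde070ed9"))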
[ 1162.028891] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a6e906ec-50e8-4b3a-8abf-6e693adfac6b tempest-ServerDiagnosticsV248Test-291789393 tempest-ServerDiagnosticsV248Test-291789393-project-member] Lock "38e4f039-20bc-4bed-b449-227bde070ed9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.133s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1162.073761] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1162.150053] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cbf1f0-efe9-7006-e873-efabb10a63a3, 'name': SearchDatastore_Task, 'duration_secs': 0.031006} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.151106] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1162.151532] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] d3c82821-0617-4de6-8109-813a67910ed1/d3c82821-0617-4de6-8109-813a67910ed1.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1162.151780] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fdf59002-ab42-4b79-b50c-aa4d82e55a64 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.160137] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1162.160137] env[61978]: value = "task-1395522" [ 1162.160137] env[61978]: _type = "Task" [ 1162.160137] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.169851] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395522, 'name': CopyVirtualDisk_Task} progress is 0%. 
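The records above show the cached base image 4732143d-796a-4a66-9f1e-806f8b0654e0 being located in the datastore image cache under a per-image lock and then copied into the instance directory for d3c82821-0617-4de6-8109-813a67910ed1; a few records further on the copy completes and the root disk is extended to 1048576, which reads as the instance's 1 GiB root disk expressed in KiB. The sketch below is a hypothetical outline of that flow, not Nova's implementation: the helper callables stand in for the vCenter tasks, the RuntimeError fallback replaces the real image download, and only the paths and UUIDs are taken from the log.

from oslo_concurrency import lockutils


def prepare_root_disk(image_id, instance_uuid, root_gb,
                      cache_has_image, copy_virtual_disk, extend_virtual_disk):
    cache_path = "[datastore2] devstack-image-cache_base/%s/%s.vmdk" % (image_id, image_id)
    instance_path = "[datastore2] %s/%s.vmdk" % (instance_uuid, instance_uuid)

    # Serialize per cached image so concurrent builds do not race on the cache entry.
    with lockutils.lock(cache_path):
        if not cache_has_image(cache_path):
            raise RuntimeError("image %s not cached; a real driver would fetch it here" % image_id)
        copy_virtual_disk(cache_path, instance_path)

    # Flavor sizes are in GiB; the extend value here is passed in KiB (1 GiB -> 1048576).
    extend_virtual_disk(instance_path, root_gb * 1024 * 1024)
    return instance_path


if __name__ == "__main__":
    path = prepare_root_disk(
        "4732143d-796a-4a66-9f1e-806f8b0654e0",
        "d3c82821-0617-4de6-8109-813a67910ed1",
        root_gb=1,
        cache_has_image=lambda p: True,
        copy_virtual_disk=lambda src, dst: print("copy", src, "->", dst),
        extend_virtual_disk=lambda p, kib: print("extend", p, "to", kib, "KiB"),
    )
    print(path)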
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.230641] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6cf23655-b311-4bc0-a3ec-a75f7b47ad26 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "81f0b79c-97b3-4a5d-a8fc-7c2250571177" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.015s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1162.315814] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "94665d8c-df88-4ad0-bb90-547ace2d6345" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1162.316124] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "94665d8c-df88-4ad0-bb90-547ace2d6345" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1162.316397] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "94665d8c-df88-4ad0-bb90-547ace2d6345-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1162.316601] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "94665d8c-df88-4ad0-bb90-547ace2d6345-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1162.316798] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "94665d8c-df88-4ad0-bb90-547ace2d6345-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1162.319151] env[61978]: INFO nova.compute.manager [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Terminating instance [ 1162.320988] env[61978]: DEBUG nova.compute.manager [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Start destroying the 
instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1162.321213] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1162.322033] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a597c6-4c8f-477f-ae3e-2b71ba799f0f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.331286] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1162.331811] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92600dab-ec32-4b0d-a7d9-7e85c6ee9f6a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.340123] env[61978]: DEBUG oslo_vmware.api [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1162.340123] env[61978]: value = "task-1395523" [ 1162.340123] env[61978]: _type = "Task" [ 1162.340123] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.349838] env[61978]: DEBUG oslo_vmware.api [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395523, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.455378] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cd9efd17-6257-4ce1-bc83-ebc8cfd1b648 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1162.455378] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cd9efd17-6257-4ce1-bc83-ebc8cfd1b648 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1162.675753] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395522, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.762914] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5deb643d-1b22-4f7e-9c4b-a786fd6fef77 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.775939] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9781d26d-34b4-4a0a-93bb-11d3296a86a4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.812323] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8ec982-f2ee-4685-94f6-ccc37270754d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.821210] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357c3b52-12f6-4485-8301-85484cde1f02 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.835981] env[61978]: DEBUG nova.compute.provider_tree [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1162.850134] env[61978]: DEBUG oslo_vmware.api [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395523, 'name': PowerOffVM_Task, 'duration_secs': 0.222504} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.850417] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1162.850596] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1162.850849] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-91421a1b-2ca9-4a24-915f-eaa153bfdde2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.955374] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1162.955739] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1162.956025] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Deleting the datastore file [datastore1] 94665d8c-df88-4ad0-bb90-547ace2d6345 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1162.956433] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a792b9c6-cd37-4149-8eb1-5681cd82b382 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.960172] env[61978]: DEBUG nova.compute.utils [None req-cd9efd17-6257-4ce1-bc83-ebc8cfd1b648 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1162.968177] env[61978]: DEBUG oslo_vmware.api [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1162.968177] env[61978]: value = "task-1395525" [ 1162.968177] env[61978]: _type = "Task" [ 1162.968177] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.980463] env[61978]: DEBUG oslo_vmware.api [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395525, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.171471] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395522, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.700107} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.171729] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] d3c82821-0617-4de6-8109-813a67910ed1/d3c82821-0617-4de6-8109-813a67910ed1.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1163.172179] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1163.172246] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fe903d45-cded-4453-91c9-89149c858ba6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.181023] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1163.181023] env[61978]: value = "task-1395526" [ 1163.181023] env[61978]: _type = "Task" [ 1163.181023] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.190697] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395526, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.339635] env[61978]: DEBUG nova.scheduler.client.report [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1163.464244] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cd9efd17-6257-4ce1-bc83-ebc8cfd1b648 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1163.479642] env[61978]: DEBUG oslo_vmware.api [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395525, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.331205} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.480017] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1163.480291] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1163.480427] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1163.480634] env[61978]: INFO nova.compute.manager [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1163.481021] env[61978]: DEBUG oslo.service.loopingcall [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1163.481201] env[61978]: DEBUG nova.compute.manager [-] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1163.481349] env[61978]: DEBUG nova.network.neutron [-] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1163.577912] env[61978]: INFO nova.compute.manager [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Rebuilding instance [ 1163.626265] env[61978]: DEBUG nova.compute.manager [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1163.627244] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1831691b-a29d-4560-8bb5-8bc0ef2351b3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.692498] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395526, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074746} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.692735] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1163.693574] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f333eb5a-2fa6-4ac1-b456-0b4f0ebd8722 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.716362] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] d3c82821-0617-4de6-8109-813a67910ed1/d3c82821-0617-4de6-8109-813a67910ed1.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1163.716992] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08b04acd-0723-41b0-ac98-3546a0fffdd6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.737255] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1163.737255] env[61978]: value = "task-1395527" [ 
1163.737255] env[61978]: _type = "Task" [ 1163.737255] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.745966] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395527, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.845634] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.347s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1163.845856] env[61978]: DEBUG nova.compute.manager [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1163.850771] env[61978]: DEBUG oslo_concurrency.lockutils [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.548s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1163.850771] env[61978]: DEBUG oslo_concurrency.lockutils [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1163.852193] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.358s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1163.852532] env[61978]: INFO nova.compute.claims [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1163.885068] env[61978]: INFO nova.scheduler.client.report [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Deleted allocations for instance a1087abd-28d1-40ac-96ab-dc38392d027c [ 1164.112724] env[61978]: DEBUG nova.compute.manager [req-2a318484-b748-4324-99de-6b554b2d608e req-b59e7251-74e0-40fc-b5cf-6a6c0e720029 service nova] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Received event network-vif-deleted-25e2db6a-d281-4865-8d9b-8ae12370c2b9 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1164.113062] env[61978]: INFO 
nova.compute.manager [req-2a318484-b748-4324-99de-6b554b2d608e req-b59e7251-74e0-40fc-b5cf-6a6c0e720029 service nova] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Neutron deleted interface 25e2db6a-d281-4865-8d9b-8ae12370c2b9; detaching it from the instance and deleting it from the info cache [ 1164.113584] env[61978]: DEBUG nova.network.neutron [req-2a318484-b748-4324-99de-6b554b2d608e req-b59e7251-74e0-40fc-b5cf-6a6c0e720029 service nova] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.139301] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1164.140124] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-21998698-247f-46af-86d7-da3662f3c539 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.150775] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1164.150775] env[61978]: value = "task-1395528" [ 1164.150775] env[61978]: _type = "Task" [ 1164.150775] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.161338] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395528, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.248998] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395527, 'name': ReconfigVM_Task, 'duration_secs': 0.463418} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.249326] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Reconfigured VM instance instance-00000050 to attach disk [datastore2] d3c82821-0617-4de6-8109-813a67910ed1/d3c82821-0617-4de6-8109-813a67910ed1.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1164.249969] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-444c4636-70df-40ce-b196-23f14ad70234 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.258898] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1164.258898] env[61978]: value = "task-1395529" [ 1164.258898] env[61978]: _type = "Task" [ 1164.258898] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.268343] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395529, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.357178] env[61978]: DEBUG nova.compute.utils [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1164.362704] env[61978]: DEBUG nova.compute.manager [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1164.362704] env[61978]: DEBUG nova.network.neutron [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1164.394702] env[61978]: DEBUG oslo_concurrency.lockutils [None req-563c246b-4e2f-4ce1-8082-25664522c3ee tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "a1087abd-28d1-40ac-96ab-dc38392d027c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.203s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1164.466731] env[61978]: DEBUG nova.network.neutron [-] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.486068] env[61978]: DEBUG nova.policy [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '394d03fc54234c369ad2e1255eee9c82', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c31ffdd4e70d40ecbbb56777f9422a52', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1164.587583] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cd9efd17-6257-4ce1-bc83-ebc8cfd1b648 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1164.587876] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cd9efd17-6257-4ce1-bc83-ebc8cfd1b648 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1164.588142] env[61978]: INFO nova.compute.manager [None req-cd9efd17-6257-4ce1-bc83-ebc8cfd1b648 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Attaching volume 861ed39b-543d-436d-a50d-364cfadf8e50 to /dev/sdb [ 1164.618922] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7339de14-e6d4-4f47-a728-dbafd89804c4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.622952] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf24ad2-7457-452f-a949-56ca42695ab2 {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.632368] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26346955-7da8-41c3-9004-63b60145f6e4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.637895] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b4621c0-0c11-4486-bae9-85fbb2b55335 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.663373] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395528, 'name': PowerOffVM_Task, 'duration_secs': 0.300967} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.679707] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1164.680339] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1164.680804] env[61978]: DEBUG nova.virt.block_device [None req-cd9efd17-6257-4ce1-bc83-ebc8cfd1b648 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Updating existing volume attachment record: f593d530-5926-45d5-bf56-687d63db3bb4 {{(pid=61978) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1164.683783] env[61978]: DEBUG nova.compute.manager [req-2a318484-b748-4324-99de-6b554b2d608e req-b59e7251-74e0-40fc-b5cf-6a6c0e720029 service nova] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Detach interface failed, port_id=25e2db6a-d281-4865-8d9b-8ae12370c2b9, reason: Instance 94665d8c-df88-4ad0-bb90-547ace2d6345 could not be found. 
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1164.686346] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5035714-e4f3-4fb7-b3a3-bdda1ae0ffde {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.694890] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1164.695387] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fea0b63e-c5eb-4592-8319-385bf40ac832 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.769251] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395529, 'name': Rename_Task, 'duration_secs': 0.154951} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.769623] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1164.769935] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-80e25567-1059-4e5a-9727-2f638a08f317 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.776392] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1164.776392] env[61978]: value = "task-1395531" [ 1164.776392] env[61978]: _type = "Task" [ 1164.776392] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.794310] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395531, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.861372] env[61978]: DEBUG nova.compute.manager [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1164.969615] env[61978]: INFO nova.compute.manager [-] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Took 1.49 seconds to deallocate network for instance. 
[ 1165.178721] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5db9397-18c2-408b-8046-872be7733b5c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.194876] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c3acb58-3b5c-4595-9a53-d81120f59b10 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.233298] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e70172c2-f371-4886-acae-db6c735aed11 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.242726] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3239b504-1b30-4f15-899c-97d3e944c3be {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.259169] env[61978]: DEBUG nova.compute.provider_tree [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1165.288457] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395531, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.452984] env[61978]: DEBUG nova.network.neutron [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Successfully created port: 4a5f6d6c-8742-44fb-823a-a586923aaa5d {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1165.481146] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.763204] env[61978]: DEBUG nova.scheduler.client.report [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1165.789088] env[61978]: DEBUG oslo_vmware.api [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 
tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395531, 'name': PowerOnVM_Task, 'duration_secs': 0.654142} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.789376] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1165.789585] env[61978]: INFO nova.compute.manager [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Took 9.05 seconds to spawn the instance on the hypervisor. [ 1165.789770] env[61978]: DEBUG nova.compute.manager [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1165.790914] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da9144d-ed7e-48f4-8ad0-e5c45579e48e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.875669] env[61978]: DEBUG nova.compute.manager [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1165.904478] env[61978]: DEBUG nova.virt.hardware [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1165.905173] env[61978]: DEBUG nova.virt.hardware [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1165.905366] env[61978]: DEBUG nova.virt.hardware [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1165.905567] env[61978]: DEBUG nova.virt.hardware [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1165.905726] env[61978]: DEBUG nova.virt.hardware [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1165.905881] env[61978]: DEBUG nova.virt.hardware [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1165.906193] env[61978]: DEBUG nova.virt.hardware [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1165.906390] env[61978]: DEBUG nova.virt.hardware [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1165.906579] env[61978]: DEBUG nova.virt.hardware [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 
tempest-ImagesTestJSON-1872689461-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1165.906761] env[61978]: DEBUG nova.virt.hardware [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1165.906956] env[61978]: DEBUG nova.virt.hardware [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1165.907842] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae481f8-46ed-4364-b266-80b2d67df182 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.920235] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e9d658-62b7-44f7-ac4f-cd087df11f1c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.271191] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.419s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.271191] env[61978]: DEBUG nova.compute.manager [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1166.276784] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.203s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.277864] env[61978]: DEBUG nova.objects.instance [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lazy-loading 'resources' on Instance uuid 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1166.315319] env[61978]: INFO nova.compute.manager [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Took 25.42 seconds to build instance. 
[ 1166.781126] env[61978]: DEBUG nova.compute.utils [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1166.787844] env[61978]: DEBUG nova.compute.manager [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1166.788061] env[61978]: DEBUG nova.network.neutron [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1166.820261] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e1dd000e-808d-4d87-a46f-83f5bab260eb tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "d3c82821-0617-4de6-8109-813a67910ed1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.933s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.845058] env[61978]: DEBUG nova.policy [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8f50bac42274555ab08e047cdb028ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43ebac7c44604f55b94cbc06648f4908', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1167.047539] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df1a708a-141f-440d-9ba6-7f4b2645f2f3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.057520] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-733770e6-ef61-41fe-8846-deb9834e8034 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.091278] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ad3e33-c718-43ef-9aba-ec8315f1b34c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.103641] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-686bce51-3979-470d-bbd2-c7cebe2ad4dc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.120394] env[61978]: DEBUG nova.compute.provider_tree [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 
tempest-AttachInterfacesTestJSON-408106683-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1167.172249] env[61978]: DEBUG nova.network.neutron [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Successfully created port: 2efcc135-18f4-45d3-9408-817cdbada770 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1167.290797] env[61978]: DEBUG nova.compute.manager [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1167.377407] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1167.377735] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1167.377897] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Deleting the datastore file [datastore1] 81f0b79c-97b3-4a5d-a8fc-7c2250571177 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1167.379326] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd4c6b1f-cbf9-478b-a4ef-907ae2511e3c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.392031] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1167.392031] env[61978]: value = "task-1395536" [ 1167.392031] env[61978]: _type = "Task" [ 1167.392031] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.400992] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395536, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.623919] env[61978]: DEBUG nova.scheduler.client.report [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1167.731133] env[61978]: DEBUG nova.compute.manager [req-8daf1c65-3d1b-4281-afdf-9023abcfb012 req-449f946a-690a-4a2c-a32a-c2728b9e9f3a service nova] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Received event network-vif-plugged-4a5f6d6c-8742-44fb-823a-a586923aaa5d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1167.731836] env[61978]: DEBUG oslo_concurrency.lockutils [req-8daf1c65-3d1b-4281-afdf-9023abcfb012 req-449f946a-690a-4a2c-a32a-c2728b9e9f3a service nova] Acquiring lock "612aba6c-a30d-4eeb-8f85-e791bda55582-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1167.732254] env[61978]: DEBUG oslo_concurrency.lockutils [req-8daf1c65-3d1b-4281-afdf-9023abcfb012 req-449f946a-690a-4a2c-a32a-c2728b9e9f3a service nova] Lock "612aba6c-a30d-4eeb-8f85-e791bda55582-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1167.732451] env[61978]: DEBUG oslo_concurrency.lockutils [req-8daf1c65-3d1b-4281-afdf-9023abcfb012 req-449f946a-690a-4a2c-a32a-c2728b9e9f3a service nova] Lock "612aba6c-a30d-4eeb-8f85-e791bda55582-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1167.732631] env[61978]: DEBUG nova.compute.manager [req-8daf1c65-3d1b-4281-afdf-9023abcfb012 req-449f946a-690a-4a2c-a32a-c2728b9e9f3a service nova] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] No waiting events found dispatching network-vif-plugged-4a5f6d6c-8742-44fb-823a-a586923aaa5d {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1167.732802] env[61978]: WARNING nova.compute.manager [req-8daf1c65-3d1b-4281-afdf-9023abcfb012 req-449f946a-690a-4a2c-a32a-c2728b9e9f3a service nova] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Received unexpected event network-vif-plugged-4a5f6d6c-8742-44fb-823a-a586923aaa5d for instance with vm_state building and task_state spawning. [ 1167.906021] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395536, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.221397} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.906021] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1167.906021] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1167.906021] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1168.135026] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.856s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.135904] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.655s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1168.136352] env[61978]: DEBUG nova.objects.instance [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lazy-loading 'resources' on Instance uuid 94665d8c-df88-4ad0-bb90-547ace2d6345 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1168.161064] env[61978]: INFO nova.scheduler.client.report [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Deleted allocations for instance 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0 [ 1168.198053] env[61978]: DEBUG nova.network.neutron [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Successfully updated port: 4a5f6d6c-8742-44fb-823a-a586923aaa5d {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1168.302405] env[61978]: DEBUG nova.compute.manager [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1168.336435] env[61978]: DEBUG nova.virt.hardware [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1168.336737] env[61978]: DEBUG nova.virt.hardware [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1168.336980] env[61978]: DEBUG nova.virt.hardware [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1168.337481] env[61978]: DEBUG nova.virt.hardware [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1168.337717] env[61978]: DEBUG nova.virt.hardware [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1168.338355] env[61978]: DEBUG nova.virt.hardware [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1168.338355] env[61978]: DEBUG nova.virt.hardware [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1168.338355] env[61978]: DEBUG nova.virt.hardware [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1168.338623] env[61978]: DEBUG nova.virt.hardware [None 
req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1168.338853] env[61978]: DEBUG nova.virt.hardware [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1168.339461] env[61978]: DEBUG nova.virt.hardware [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1168.340293] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb83ab6-2800-45c2-b64c-0ec8a074b1b3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.350401] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a10fc284-eab9-447e-9eff-cba2cde437ea {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.412394] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Acquiring lock "5d1d19d8-241b-41b8-b1c0-caf54f8fd600" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1168.412659] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Lock "5d1d19d8-241b-41b8-b1c0-caf54f8fd600" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1168.608632] env[61978]: DEBUG nova.compute.manager [req-e5171168-97f2-464a-8bf6-962c282fb028 req-77ff253c-47b7-4b25-9c0f-376eb935f4cc service nova] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Received event network-vif-plugged-2efcc135-18f4-45d3-9408-817cdbada770 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1168.609266] env[61978]: DEBUG oslo_concurrency.lockutils [req-e5171168-97f2-464a-8bf6-962c282fb028 req-77ff253c-47b7-4b25-9c0f-376eb935f4cc service nova] Acquiring lock "b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1168.609266] env[61978]: DEBUG oslo_concurrency.lockutils [req-e5171168-97f2-464a-8bf6-962c282fb028 req-77ff253c-47b7-4b25-9c0f-376eb935f4cc service nova] Lock "b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1168.609266] env[61978]: DEBUG oslo_concurrency.lockutils [req-e5171168-97f2-464a-8bf6-962c282fb028 req-77ff253c-47b7-4b25-9c0f-376eb935f4cc service nova] Lock "b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.609577] env[61978]: DEBUG nova.compute.manager [req-e5171168-97f2-464a-8bf6-962c282fb028 req-77ff253c-47b7-4b25-9c0f-376eb935f4cc service nova] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] No waiting events found dispatching network-vif-plugged-2efcc135-18f4-45d3-9408-817cdbada770 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1168.609769] env[61978]: WARNING nova.compute.manager [req-e5171168-97f2-464a-8bf6-962c282fb028 req-77ff253c-47b7-4b25-9c0f-376eb935f4cc service nova] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Received unexpected event network-vif-plugged-2efcc135-18f4-45d3-9408-817cdbada770 for instance with vm_state building and task_state spawning. [ 1168.673041] env[61978]: DEBUG oslo_concurrency.lockutils [None req-44cfc217-d23f-47f1-8724-d643b37596ff tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.202s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.674069] env[61978]: DEBUG oslo_concurrency.lockutils [req-c453fecf-dd7c-46c8-8769-99b7679bedbd req-ca9bf941-6b08-42ca-9612-e374d49b08e5 service nova] Acquired lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.677243] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce214992-9d58-4b99-ab79-6cd884e1e601 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.685741] env[61978]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
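For context on the records immediately above and below: the suds WARNING ("SOAP processing fault ... HTTP status code 200") together with the later "Original exception being dropped" traceback shows the usual translation chain when a VM has already been destroyed in vCenter. The SOAP-level ManagedObjectNotFound fault surfaces as a suds WebFault, oslo.vmware re-raises it as a typed ManagedObjectNotFoundException, and Nova ultimately reports InstanceNotFound while finishing the interface-detach cleanup. A minimal, hypothetical Python sketch of that pattern follows; the class and function names are illustrative stand-ins, not the actual oslo.vmware or Nova implementations.

    class WebFault(Exception):
        """Stand-in for the SOAP-level fault raised by the suds client."""


    class ManagedObjectNotFoundException(Exception):
        """Stand-in for the typed exception oslo.vmware derives from the fault list."""


    class InstanceNotFound(Exception):
        """Stand-in for the Nova-level exception reported at the end of the chain."""


    def invoke_api(call):
        # Layer 1: the SOAP fault (ManagedObjectNotFound) is re-raised as a typed
        # exception, mirroring the "Fault list: [ManagedObjectNotFound]" record.
        try:
            return call()
        except WebFault as exc:
            raise ManagedObjectNotFoundException(str(exc)) from exc


    def get_vm_property(call, instance_uuid):
        # Layer 2: the vSphere-level error is dropped and the instance is reported
        # as gone, matching the "Original exception being dropped" record.
        try:
            return invoke_api(call)
        except ManagedObjectNotFoundException as exc:
            raise InstanceNotFound(
                "Instance %s could not be found." % instance_uuid) from exc


    if __name__ == "__main__":
        def deleted_vm_lookup():
            raise WebFault("The object 'vim.VirtualMachine:vm-295871' has already "
                           "been deleted or has not been completely created")

        try:
            get_vm_property(deleted_vm_lookup,
                            "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0")
        except InstanceNotFound as err:
            print(err)

Running the sketch prints the same "could not be found" message that the compute manager logs below when the detach-interface cleanup encounters the already-deleted VM.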
[ 1168.685741] env[61978]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=61978) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1168.685741] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1f41f9a3-7e64-49f2-a09d-11b870b6a5d9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.700776] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9045b0ef-4828-472d-9d7b-5b8665cd001f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.724309] env[61978]: DEBUG nova.network.neutron [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Successfully updated port: 2efcc135-18f4-45d3-9408-817cdbada770 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1168.725872] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "refresh_cache-612aba6c-a30d-4eeb-8f85-e791bda55582" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1168.726052] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquired lock "refresh_cache-612aba6c-a30d-4eeb-8f85-e791bda55582" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.726201] env[61978]: DEBUG nova.network.neutron [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1168.756195] env[61978]: ERROR root [req-c453fecf-dd7c-46c8-8769-99b7679bedbd req-ca9bf941-6b08-42ca-9612-e374d49b08e5 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-295871' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-295871' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-295871' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-295871'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-295871' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-295871' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-295871'}\n"]: nova.exception.InstanceNotFound: Instance 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0 could not be found. [ 1168.756195] env[61978]: DEBUG oslo_concurrency.lockutils [req-c453fecf-dd7c-46c8-8769-99b7679bedbd req-ca9bf941-6b08-42ca-9612-e374d49b08e5 service nova] Releasing lock "9bee3e66-93b5-4c0f-bb46-8fbd78c312c0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1168.756388] env[61978]: DEBUG nova.compute.manager [req-c453fecf-dd7c-46c8-8769-99b7679bedbd req-ca9bf941-6b08-42ca-9612-e374d49b08e5 service nova] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Detach interface failed, port_id=3ddcad7d-4ce0-48f2-adea-99d0e1cd2a04, reason: Instance 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0 could not be found. 
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1168.918943] env[61978]: DEBUG nova.compute.manager [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1168.927544] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc03b09d-397a-4403-be4b-90b53d9a20e9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.935997] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd1e6e4-af0f-4f75-b78f-d379e39b157b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.976904] env[61978]: DEBUG nova.virt.hardware [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1168.977154] env[61978]: DEBUG nova.virt.hardware [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1168.977324] env[61978]: DEBUG nova.virt.hardware [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1168.977513] env[61978]: DEBUG nova.virt.hardware [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1168.977663] env[61978]: DEBUG nova.virt.hardware [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1168.977812] env[61978]: DEBUG nova.virt.hardware [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1168.978031] env[61978]: DEBUG nova.virt.hardware [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1168.978207] env[61978]: DEBUG nova.virt.hardware [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1168.978410] env[61978]: DEBUG nova.virt.hardware [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1168.980333] env[61978]: DEBUG nova.virt.hardware [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1168.980333] env[61978]: DEBUG nova.virt.hardware [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1168.980333] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4582f52-3187-4bbe-9b6c-d4739927be82 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.983385] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624ac4dd-ea2d-48ef-b4a5-9c108c1c90c8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.997029] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed60b32c-f37b-4cbd-83fe-bb8cd6cfaef9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.002466] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c45e4276-1d99-4aa0-9b8d-86ca7e5dc417 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.017570] env[61978]: DEBUG nova.compute.provider_tree [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1169.027618] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bdad437f-a6af-492b-b75e-990ea8130da0 
tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:51:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6def6dc5-d564-45ca-8f4f-7c820677e6e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd5e0a55d-dd67-40cf-ad0c-76910a2013aa', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1169.035051] env[61978]: DEBUG oslo.service.loopingcall [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1169.036192] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1169.036534] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e1f62a72-eaa6-4e56-a7c7-3a85d1acf99b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.057460] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1169.057460] env[61978]: value = "task-1395537" [ 1169.057460] env[61978]: _type = "Task" [ 1169.057460] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.066633] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395537, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.232233] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "refresh_cache-b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1169.232328] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "refresh_cache-b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.232485] env[61978]: DEBUG nova.network.neutron [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1169.267151] env[61978]: DEBUG nova.network.neutron [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1169.430972] env[61978]: DEBUG nova.network.neutron [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Updating instance_info_cache with network_info: [{"id": "4a5f6d6c-8742-44fb-823a-a586923aaa5d", "address": "fa:16:3e:74:fe:bd", "network": {"id": "c8b3d2ab-6847-4747-9638-70d0aefd63da", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1816910772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c31ffdd4e70d40ecbbb56777f9422a52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a5f6d6c-87", "ovs_interfaceid": "4a5f6d6c-8742-44fb-823a-a586923aaa5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.459181] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1169.537474] env[61978]: DEBUG nova.scheduler.client.report [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1169.569304] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395537, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.745435] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd9efd17-6257-4ce1-bc83-ebc8cfd1b648 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Volume attach. 
Driver type: vmdk {{(pid=61978) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1169.745633] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd9efd17-6257-4ce1-bc83-ebc8cfd1b648 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295980', 'volume_id': '861ed39b-543d-436d-a50d-364cfadf8e50', 'name': 'volume-861ed39b-543d-436d-a50d-364cfadf8e50', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ae6b92bb-6f79-4b52-bdb7-095985bf2fad', 'attached_at': '', 'detached_at': '', 'volume_id': '861ed39b-543d-436d-a50d-364cfadf8e50', 'serial': '861ed39b-543d-436d-a50d-364cfadf8e50'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1169.746676] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54001be0-4034-43d6-ad79-3f11854079a9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.764900] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d7efb7-8253-4103-897a-e1db65b1efeb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.774022] env[61978]: DEBUG nova.compute.manager [req-6f239c1a-8926-40c9-b026-68fe873d0c73 req-8c25d8c2-fa7d-4c3a-b936-8958fb65170e service nova] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Received event network-changed-d2d39b09-4acd-4f24-aa07-31e86f78f134 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1169.774022] env[61978]: DEBUG nova.compute.manager [req-6f239c1a-8926-40c9-b026-68fe873d0c73 req-8c25d8c2-fa7d-4c3a-b936-8958fb65170e service nova] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Refreshing instance network info cache due to event network-changed-d2d39b09-4acd-4f24-aa07-31e86f78f134. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1169.774022] env[61978]: DEBUG oslo_concurrency.lockutils [req-6f239c1a-8926-40c9-b026-68fe873d0c73 req-8c25d8c2-fa7d-4c3a-b936-8958fb65170e service nova] Acquiring lock "refresh_cache-d3c82821-0617-4de6-8109-813a67910ed1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1169.774022] env[61978]: DEBUG oslo_concurrency.lockutils [req-6f239c1a-8926-40c9-b026-68fe873d0c73 req-8c25d8c2-fa7d-4c3a-b936-8958fb65170e service nova] Acquired lock "refresh_cache-d3c82821-0617-4de6-8109-813a67910ed1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.774022] env[61978]: DEBUG nova.network.neutron [req-6f239c1a-8926-40c9-b026-68fe873d0c73 req-8c25d8c2-fa7d-4c3a-b936-8958fb65170e service nova] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Refreshing network info cache for port d2d39b09-4acd-4f24-aa07-31e86f78f134 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1169.794874] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd9efd17-6257-4ce1-bc83-ebc8cfd1b648 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] volume-861ed39b-543d-436d-a50d-364cfadf8e50/volume-861ed39b-543d-436d-a50d-364cfadf8e50.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1169.800014] env[61978]: DEBUG nova.network.neutron [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1169.800014] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b543d3ef-5abd-4155-81a4-96d382abcc31 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.821381] env[61978]: DEBUG oslo_vmware.api [None req-cd9efd17-6257-4ce1-bc83-ebc8cfd1b648 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1169.821381] env[61978]: value = "task-1395538" [ 1169.821381] env[61978]: _type = "Task" [ 1169.821381] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.832751] env[61978]: DEBUG oslo_vmware.api [None req-cd9efd17-6257-4ce1-bc83-ebc8cfd1b648 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395538, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.936170] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Releasing lock "refresh_cache-612aba6c-a30d-4eeb-8f85-e791bda55582" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1169.936533] env[61978]: DEBUG nova.compute.manager [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Instance network_info: |[{"id": "4a5f6d6c-8742-44fb-823a-a586923aaa5d", "address": "fa:16:3e:74:fe:bd", "network": {"id": "c8b3d2ab-6847-4747-9638-70d0aefd63da", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1816910772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c31ffdd4e70d40ecbbb56777f9422a52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a5f6d6c-87", "ovs_interfaceid": "4a5f6d6c-8742-44fb-823a-a586923aaa5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1169.937055] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:fe:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '411f389f-4e4f-4450-891e-38944cac6135', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4a5f6d6c-8742-44fb-823a-a586923aaa5d', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1169.945402] env[61978]: DEBUG oslo.service.loopingcall [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1169.946020] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1169.946272] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-591ba78b-fb2b-49e8-aac5-2608ccf2de01 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.971333] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1169.971333] env[61978]: value = "task-1395539" [ 1169.971333] env[61978]: _type = "Task" [ 1169.971333] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.979387] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395539, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.021596] env[61978]: DEBUG nova.network.neutron [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Updating instance_info_cache with network_info: [{"id": "2efcc135-18f4-45d3-9408-817cdbada770", "address": "fa:16:3e:8c:cc:2c", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2efcc135-18", "ovs_interfaceid": "2efcc135-18f4-45d3-9408-817cdbada770", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.047664] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.912s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1170.050254] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.591s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1170.051814] env[61978]: INFO nova.compute.claims [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1170.068548] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395537, 'name': CreateVM_Task, 'duration_secs': 0.606104} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.068745] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1170.069559] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1170.069762] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.070209] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1170.070422] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42335eae-d36b-4d5b-800e-c1f76c29f2a8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.073049] env[61978]: INFO nova.scheduler.client.report [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Deleted allocations for instance 94665d8c-df88-4ad0-bb90-547ace2d6345 [ 1170.083016] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1170.083016] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52471e75-ec3f-7fa0-149f-5f7ece125c77" [ 1170.083016] env[61978]: _type = "Task" [ 1170.083016] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.097324] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52471e75-ec3f-7fa0-149f-5f7ece125c77, 'name': SearchDatastore_Task, 'duration_secs': 0.010824} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.098107] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1170.098378] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1170.098653] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1170.098858] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.099106] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1170.099415] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82d47ed8-525b-4eb7-9527-de07fb16db16 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.109156] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1170.110116] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1170.110293] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c6f092d-5025-408b-99d6-266bca30376b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.119244] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1170.119244] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]520f576f-c918-8f75-96f6-46875d9d9267" [ 1170.119244] env[61978]: _type = "Task" [ 1170.119244] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.129714] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520f576f-c918-8f75-96f6-46875d9d9267, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.069086] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "refresh_cache-b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1171.069417] env[61978]: DEBUG nova.compute.manager [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Instance network_info: |[{"id": "2efcc135-18f4-45d3-9408-817cdbada770", "address": "fa:16:3e:8c:cc:2c", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2efcc135-18", "ovs_interfaceid": "2efcc135-18f4-45d3-9408-817cdbada770", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1171.075152] env[61978]: DEBUG oslo_vmware.api [None req-cd9efd17-6257-4ce1-bc83-ebc8cfd1b648 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395538, 'name': ReconfigVM_Task, 'duration_secs': 0.370102} completed 
successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.078052] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e43422c9-767c-463c-ad54-cda6311fe239 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "94665d8c-df88-4ad0-bb90-547ace2d6345" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.762s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.086161] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:cc:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ed3ffc1d-9f86-4029-857e-6cd1d383edbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2efcc135-18f4-45d3-9408-817cdbada770', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1171.091706] env[61978]: DEBUG oslo.service.loopingcall [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1171.096188] env[61978]: DEBUG nova.compute.manager [req-f4b99148-048d-42d7-9bf1-12f10cac6a38 req-4b6cb633-47a9-4fac-9798-1f823592ded0 service nova] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Received event network-changed-2efcc135-18f4-45d3-9408-817cdbada770 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1171.101822] env[61978]: DEBUG nova.compute.manager [req-f4b99148-048d-42d7-9bf1-12f10cac6a38 req-4b6cb633-47a9-4fac-9798-1f823592ded0 service nova] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Refreshing instance network info cache due to event network-changed-2efcc135-18f4-45d3-9408-817cdbada770. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1171.101822] env[61978]: DEBUG oslo_concurrency.lockutils [req-f4b99148-048d-42d7-9bf1-12f10cac6a38 req-4b6cb633-47a9-4fac-9798-1f823592ded0 service nova] Acquiring lock "refresh_cache-b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1171.101822] env[61978]: DEBUG oslo_concurrency.lockutils [req-f4b99148-048d-42d7-9bf1-12f10cac6a38 req-4b6cb633-47a9-4fac-9798-1f823592ded0 service nova] Acquired lock "refresh_cache-b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.101822] env[61978]: DEBUG nova.network.neutron [req-f4b99148-048d-42d7-9bf1-12f10cac6a38 req-4b6cb633-47a9-4fac-9798-1f823592ded0 service nova] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Refreshing network info cache for port 2efcc135-18f4-45d3-9408-817cdbada770 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1171.101822] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd9efd17-6257-4ce1-bc83-ebc8cfd1b648 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Reconfigured VM instance instance-00000046 to attach disk [datastore2] volume-861ed39b-543d-436d-a50d-364cfadf8e50/volume-861ed39b-543d-436d-a50d-364cfadf8e50.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1171.104319] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1171.104556] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2aaf20a9-618f-4c00-8506-6c01926f8e6c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.117044] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c039bf3-a82f-49af-b8d0-e3c1893a165d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.136038] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395539, 'name': CreateVM_Task, 'duration_secs': 0.374121} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.140467] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1171.141065] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520f576f-c918-8f75-96f6-46875d9d9267, 'name': SearchDatastore_Task, 'duration_secs': 0.010202} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.143205] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1171.143381] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.143933] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1171.144828] env[61978]: DEBUG oslo_vmware.api [None req-cd9efd17-6257-4ce1-bc83-ebc8cfd1b648 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1171.144828] env[61978]: value = "task-1395540" [ 1171.144828] env[61978]: _type = "Task" [ 1171.144828] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.146108] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77ddacd9-12fd-4d7f-aaf0-410c25d8cd0a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.148025] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e91e5660-5477-4e20-8cfe-1b06517a6f58 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.150411] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1171.150411] env[61978]: value = "task-1395541" [ 1171.150411] env[61978]: _type = "Task" [ 1171.150411] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.160228] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1171.160228] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5277c0ad-d542-6ceb-a190-d1ed6fdb9b17" [ 1171.160228] env[61978]: _type = "Task" [ 1171.160228] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.160543] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1171.160543] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52371def-b951-e648-745b-4acd06297db2" [ 1171.160543] env[61978]: _type = "Task" [ 1171.160543] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.175297] env[61978]: DEBUG oslo_vmware.api [None req-cd9efd17-6257-4ce1-bc83-ebc8cfd1b648 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395540, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.175532] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395541, 'name': CreateVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.185010] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52371def-b951-e648-745b-4acd06297db2, 'name': SearchDatastore_Task, 'duration_secs': 0.010316} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.185296] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5277c0ad-d542-6ceb-a190-d1ed6fdb9b17, 'name': SearchDatastore_Task, 'duration_secs': 0.010013} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.185580] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1171.185795] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 81f0b79c-97b3-4a5d-a8fc-7c2250571177/81f0b79c-97b3-4a5d-a8fc-7c2250571177.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1171.186161] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1171.186390] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1171.186620] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1171.186769] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.186946] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1171.187262] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0bc30247-6bf7-446e-9e6e-a352624aa750 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.189743] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3608bad-e415-40a6-a59c-40ecb4b79be1 {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.201473] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1171.201473] env[61978]: value = "task-1395542" [ 1171.201473] env[61978]: _type = "Task" [ 1171.201473] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.205170] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1171.205421] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1171.208652] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d9cb9a4-211a-4642-bbc0-60e9766b3cab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.217883] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1171.217883] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c205f4-411d-8d12-278c-398dc80a96a6" [ 1171.217883] env[61978]: _type = "Task" [ 1171.217883] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.221560] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395542, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.231898] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c205f4-411d-8d12-278c-398dc80a96a6, 'name': SearchDatastore_Task, 'duration_secs': 0.010088} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.232815] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-491857cc-0de8-45fe-80c8-5c513550f31d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.239205] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1171.239205] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]523198ce-8590-22b4-18ff-d2bafd8d4257" [ 1171.239205] env[61978]: _type = "Task" [ 1171.239205] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.249132] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523198ce-8590-22b4-18ff-d2bafd8d4257, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.379632] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.379893] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.407471] env[61978]: DEBUG nova.network.neutron [req-6f239c1a-8926-40c9-b026-68fe873d0c73 req-8c25d8c2-fa7d-4c3a-b936-8958fb65170e service nova] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updated VIF entry in instance network info cache for port d2d39b09-4acd-4f24-aa07-31e86f78f134. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1171.407930] env[61978]: DEBUG nova.network.neutron [req-6f239c1a-8926-40c9-b026-68fe873d0c73 req-8c25d8c2-fa7d-4c3a-b936-8958fb65170e service nova] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updating instance_info_cache with network_info: [{"id": "d2d39b09-4acd-4f24-aa07-31e86f78f134", "address": "fa:16:3e:3b:95:21", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2d39b09-4a", "ovs_interfaceid": "d2d39b09-4acd-4f24-aa07-31e86f78f134", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.674193] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395541, 'name': 
CreateVM_Task, 'duration_secs': 0.407749} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.674193] env[61978]: DEBUG oslo_vmware.api [None req-cd9efd17-6257-4ce1-bc83-ebc8cfd1b648 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395540, 'name': ReconfigVM_Task, 'duration_secs': 0.191795} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.674193] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1171.674477] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd9efd17-6257-4ce1-bc83-ebc8cfd1b648 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295980', 'volume_id': '861ed39b-543d-436d-a50d-364cfadf8e50', 'name': 'volume-861ed39b-543d-436d-a50d-364cfadf8e50', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ae6b92bb-6f79-4b52-bdb7-095985bf2fad', 'attached_at': '', 'detached_at': '', 'volume_id': '861ed39b-543d-436d-a50d-364cfadf8e50', 'serial': '861ed39b-543d-436d-a50d-364cfadf8e50'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1171.677805] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1171.677805] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.677805] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1171.678036] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-667132cc-dfa3-41ff-8d91-247e3be87eee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.685668] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1171.685668] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]525791c4-4b2f-1218-d972-d64f19f2fc38" [ 1171.685668] env[61978]: _type = "Task" [ 1171.685668] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.702279] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]525791c4-4b2f-1218-d972-d64f19f2fc38, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.714937] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395542, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.754575] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523198ce-8590-22b4-18ff-d2bafd8d4257, 'name': SearchDatastore_Task, 'duration_secs': 0.010159} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.754575] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1171.754575] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 612aba6c-a30d-4eeb-8f85-e791bda55582/612aba6c-a30d-4eeb-8f85-e791bda55582.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1171.754814] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e2f2f430-e729-42e9-bc1f-c1f2dfd2149a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.762057] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1171.762057] env[61978]: value = "task-1395543" [ 1171.762057] env[61978]: _type = "Task" [ 1171.762057] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.771125] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395543, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.839428] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df16aa2-3795-4d77-bdb5-82ed8c4e0b65 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.851729] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9594fb8-f4ea-43a5-a970-95eb80f60459 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.855759] env[61978]: DEBUG nova.network.neutron [req-f4b99148-048d-42d7-9bf1-12f10cac6a38 req-4b6cb633-47a9-4fac-9798-1f823592ded0 service nova] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Updated VIF entry in instance network info cache for port 2efcc135-18f4-45d3-9408-817cdbada770. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1171.856081] env[61978]: DEBUG nova.network.neutron [req-f4b99148-048d-42d7-9bf1-12f10cac6a38 req-4b6cb633-47a9-4fac-9798-1f823592ded0 service nova] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Updating instance_info_cache with network_info: [{"id": "2efcc135-18f4-45d3-9408-817cdbada770", "address": "fa:16:3e:8c:cc:2c", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2efcc135-18", "ovs_interfaceid": "2efcc135-18f4-45d3-9408-817cdbada770", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.886660] env[61978]: DEBUG nova.compute.manager [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1171.890668] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-337d23d1-0e91-403f-8f56-2c8db4169ea4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.904850] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08095541-1fbc-4da0-9dd0-41358ded67ef {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.910356] env[61978]: DEBUG oslo_concurrency.lockutils [req-6f239c1a-8926-40c9-b026-68fe873d0c73 req-8c25d8c2-fa7d-4c3a-b936-8958fb65170e service nova] Releasing lock "refresh_cache-d3c82821-0617-4de6-8109-813a67910ed1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1171.910746] env[61978]: DEBUG nova.compute.manager [req-6f239c1a-8926-40c9-b026-68fe873d0c73 req-8c25d8c2-fa7d-4c3a-b936-8958fb65170e service nova] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Received event network-changed-4a5f6d6c-8742-44fb-823a-a586923aaa5d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1171.910746] env[61978]: DEBUG nova.compute.manager [req-6f239c1a-8926-40c9-b026-68fe873d0c73 req-8c25d8c2-fa7d-4c3a-b936-8958fb65170e service nova] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Refreshing instance network info cache due to event network-changed-4a5f6d6c-8742-44fb-823a-a586923aaa5d. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1171.910914] env[61978]: DEBUG oslo_concurrency.lockutils [req-6f239c1a-8926-40c9-b026-68fe873d0c73 req-8c25d8c2-fa7d-4c3a-b936-8958fb65170e service nova] Acquiring lock "refresh_cache-612aba6c-a30d-4eeb-8f85-e791bda55582" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1171.911415] env[61978]: DEBUG oslo_concurrency.lockutils [req-6f239c1a-8926-40c9-b026-68fe873d0c73 req-8c25d8c2-fa7d-4c3a-b936-8958fb65170e service nova] Acquired lock "refresh_cache-612aba6c-a30d-4eeb-8f85-e791bda55582" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.911415] env[61978]: DEBUG nova.network.neutron [req-6f239c1a-8926-40c9-b026-68fe873d0c73 req-8c25d8c2-fa7d-4c3a-b936-8958fb65170e service nova] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Refreshing network info cache for port 4a5f6d6c-8742-44fb-823a-a586923aaa5d {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1171.922900] env[61978]: DEBUG nova.compute.provider_tree [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1172.164299] env[61978]: DEBUG nova.network.neutron [req-6f239c1a-8926-40c9-b026-68fe873d0c73 req-8c25d8c2-fa7d-4c3a-b936-8958fb65170e service nova] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Updated VIF entry in instance network info cache for port 4a5f6d6c-8742-44fb-823a-a586923aaa5d. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1172.164299] env[61978]: DEBUG nova.network.neutron [req-6f239c1a-8926-40c9-b026-68fe873d0c73 req-8c25d8c2-fa7d-4c3a-b936-8958fb65170e service nova] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Updating instance_info_cache with network_info: [{"id": "4a5f6d6c-8742-44fb-823a-a586923aaa5d", "address": "fa:16:3e:74:fe:bd", "network": {"id": "c8b3d2ab-6847-4747-9638-70d0aefd63da", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1816910772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c31ffdd4e70d40ecbbb56777f9422a52", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a5f6d6c-87", "ovs_interfaceid": "4a5f6d6c-8742-44fb-823a-a586923aaa5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1172.201113] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]525791c4-4b2f-1218-d972-d64f19f2fc38, 'name': SearchDatastore_Task, 'duration_secs': 0.025339} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.201526] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1172.201837] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1172.206100] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1172.206413] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.206669] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1172.207139] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6dd539a-09ac-471c-b2a8-a6c3aec933be {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.226044] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395542, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530369} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.226397] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 81f0b79c-97b3-4a5d-a8fc-7c2250571177/81f0b79c-97b3-4a5d-a8fc-7c2250571177.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1172.226643] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1172.231588] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-51a24826-77c3-4f24-b80b-f2e2f6ad0532 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.235648] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1172.235875] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1172.237152] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43e09519-8e5c-47bb-a76c-a72da86a493a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.243243] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1172.243243] env[61978]: value = "task-1395544" [ 1172.243243] env[61978]: _type = "Task" [ 1172.243243] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.248991] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1172.248991] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52aa4078-07b6-602c-62a3-69b744bf4e8d" [ 1172.248991] env[61978]: _type = "Task" [ 1172.248991] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.257721] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395544, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.261179] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52aa4078-07b6-602c-62a3-69b744bf4e8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.272868] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395543, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.361249] env[61978]: DEBUG oslo_concurrency.lockutils [req-f4b99148-048d-42d7-9bf1-12f10cac6a38 req-4b6cb633-47a9-4fac-9798-1f823592ded0 service nova] Releasing lock "refresh_cache-b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1172.414490] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1172.427506] env[61978]: DEBUG nova.scheduler.client.report [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1172.667289] env[61978]: DEBUG oslo_concurrency.lockutils [req-6f239c1a-8926-40c9-b026-68fe873d0c73 req-8c25d8c2-fa7d-4c3a-b936-8958fb65170e service nova] Releasing lock "refresh_cache-612aba6c-a30d-4eeb-8f85-e791bda55582" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1172.737348] env[61978]: DEBUG nova.objects.instance [None req-cd9efd17-6257-4ce1-bc83-ebc8cfd1b648 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lazy-loading 'flavor' on Instance uuid ae6b92bb-6f79-4b52-bdb7-095985bf2fad {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1172.756608] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395544, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090023} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.757317] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1172.758099] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e22384-07e4-496c-9141-c8a3ada3bf67 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.764031] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52aa4078-07b6-602c-62a3-69b744bf4e8d, 'name': SearchDatastore_Task, 'duration_secs': 0.059374} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.777419] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65bcadc2-0ab2-4723-a248-7a2d953333ad {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.789138] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 81f0b79c-97b3-4a5d-a8fc-7c2250571177/81f0b79c-97b3-4a5d-a8fc-7c2250571177.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1172.789138] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-667d9ddb-b902-4382-b8a0-1e7012865bc0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.806562] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395543, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.701406} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.807187] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 612aba6c-a30d-4eeb-8f85-e791bda55582/612aba6c-a30d-4eeb-8f85-e791bda55582.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1172.807441] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1172.807696] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-787e36db-30d0-4691-b6b6-81bff1b5bc2a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.810763] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1172.810763] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5248d42c-a56c-3b60-06e4-a040ea6a431b" [ 1172.810763] env[61978]: _type = "Task" [ 1172.810763] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.816015] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1172.816015] env[61978]: value = "task-1395545" [ 1172.816015] env[61978]: _type = "Task" [ 1172.816015] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.821019] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1172.821019] env[61978]: value = "task-1395546" [ 1172.821019] env[61978]: _type = "Task" [ 1172.821019] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.827283] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5248d42c-a56c-3b60-06e4-a040ea6a431b, 'name': SearchDatastore_Task, 'duration_secs': 0.01111} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.828036] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1172.828389] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2/b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1172.829037] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-88a43a3b-12c2-448c-ad12-86245b83bfd5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.834648] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395545, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.838053] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395546, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.843007] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1172.843007] env[61978]: value = "task-1395547" [ 1172.843007] env[61978]: _type = "Task" [ 1172.843007] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.851944] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395547, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.935943] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.885s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1172.936663] env[61978]: DEBUG nova.compute.manager [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1172.939517] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.525s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1172.941027] env[61978]: INFO nova.compute.claims [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1173.055727] env[61978]: DEBUG oslo_concurrency.lockutils [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "c0be687a-7444-4019-8b12-dac41a7c080e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1173.055833] env[61978]: DEBUG oslo_concurrency.lockutils [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "c0be687a-7444-4019-8b12-dac41a7c080e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1173.243229] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cd9efd17-6257-4ce1-bc83-ebc8cfd1b648 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.655s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1173.333867] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395546, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071671} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.337568] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1173.338016] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395545, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.338851] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c36615f-de72-4e69-a4fc-f33d5a836b1c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.369695] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 612aba6c-a30d-4eeb-8f85-e791bda55582/612aba6c-a30d-4eeb-8f85-e791bda55582.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1173.369695] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f6e1414-9c3e-4c43-a971-6ddc1a6df2b6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.396197] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395547, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527047} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.398065] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2/b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1173.398351] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1173.398650] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1173.398650] env[61978]: value = "task-1395548" [ 1173.398650] env[61978]: _type = "Task" [ 1173.398650] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.398965] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3dd22cb3-97c4-4d69-99f4-85356a128f14 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.401707] env[61978]: INFO nova.compute.manager [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Rebuilding instance [ 1173.415944] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395548, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.418063] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1173.418063] env[61978]: value = "task-1395549" [ 1173.418063] env[61978]: _type = "Task" [ 1173.418063] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.431629] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395549, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.445707] env[61978]: DEBUG nova.compute.utils [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1173.450938] env[61978]: DEBUG nova.compute.manager [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1173.451146] env[61978]: DEBUG nova.network.neutron [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1173.471848] env[61978]: DEBUG nova.compute.manager [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1173.473315] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20474b1-cf09-4184-a24f-167099236ae3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.497509] env[61978]: DEBUG nova.policy [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8523ad74c344bf9bc026e9b9be33653', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '379f498936ee4490995a99b66825b9ca', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1173.560953] env[61978]: DEBUG nova.compute.manager [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1173.784248] env[61978]: DEBUG nova.network.neutron [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Successfully created port: 52e402de-991f-4bee-bc93-9d0ac255d4b7 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1173.828497] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395545, 'name': ReconfigVM_Task, 'duration_secs': 0.633545} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.828738] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 81f0b79c-97b3-4a5d-a8fc-7c2250571177/81f0b79c-97b3-4a5d-a8fc-7c2250571177.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1173.829418] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3f9b7051-7117-4601-856d-193a68168264 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.838199] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1173.838199] env[61978]: value = "task-1395550" [ 1173.838199] env[61978]: _type = "Task" [ 1173.838199] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.847898] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395550, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.911990] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395548, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.933062] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395549, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077478} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.933062] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1173.933062] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b7d465-02c6-4d7f-b05c-272a9b439f18 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.956661] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2/b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1173.957345] env[61978]: DEBUG nova.compute.manager [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1173.964151] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-265f8119-2ccf-4dd6-9968-b27cbd9a958a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.985139] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1173.985139] env[61978]: value = "task-1395551" [ 1173.985139] env[61978]: _type = "Task" [ 1173.985139] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.992040] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1173.996861] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-622171d2-5059-4a48-bcaa-059fe721bfc1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.000088] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395551, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.007409] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1174.007409] env[61978]: value = "task-1395552" [ 1174.007409] env[61978]: _type = "Task" [ 1174.007409] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.020530] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395552, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.071496] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "17c56c1c-9992-4559-ad23-c68909ae6792" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1174.071738] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "17c56c1c-9992-4559-ad23-c68909ae6792" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1174.071929] env[61978]: INFO nova.compute.manager [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Shelving [ 1174.088439] env[61978]: DEBUG oslo_concurrency.lockutils [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1174.233595] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1bd471-b779-4216-90d5-7a5aefef6b8c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.242791] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac52f00f-ab1d-4d0a-8759-9f599799f199 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.274687] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76fdb99c-4baa-49c9-ab7d-925005a9158d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.284192] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ac96b6-4b16-4272-aa11-787cb32c4b56 {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.300617] env[61978]: DEBUG nova.compute.provider_tree [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1174.351648] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395550, 'name': Rename_Task, 'duration_secs': 0.277439} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.352515] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1174.352515] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-db4d008d-e12c-4e4f-bbc8-7d2d50cf10dc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.360436] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1174.360436] env[61978]: value = "task-1395553" [ 1174.360436] env[61978]: _type = "Task" [ 1174.360436] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.370378] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395553, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.412692] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395548, 'name': ReconfigVM_Task, 'duration_secs': 0.584423} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.412999] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 612aba6c-a30d-4eeb-8f85-e791bda55582/612aba6c-a30d-4eeb-8f85-e791bda55582.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1174.413665] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d655c0d9-7ce7-45b6-b71a-81ba5ff06355 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.420964] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1174.420964] env[61978]: value = "task-1395554" [ 1174.420964] env[61978]: _type = "Task" [ 1174.420964] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.428979] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395554, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.494801] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395551, 'name': ReconfigVM_Task, 'duration_secs': 0.397399} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.495094] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Reconfigured VM instance instance-00000052 to attach disk [datastore2] b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2/b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1174.495730] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b6e42bf8-3da2-4ad7-b4bd-33c44bd950b4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.502723] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1174.502723] env[61978]: value = "task-1395555" [ 1174.502723] env[61978]: _type = "Task" [ 1174.502723] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.515302] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395555, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.521643] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395552, 'name': PowerOffVM_Task, 'duration_secs': 0.299362} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.522709] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1174.581939] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1174.582123] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-022dc29b-bc64-47e0-bfe6-102333bcd173 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.586243] env[61978]: INFO nova.compute.manager [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Detaching volume 861ed39b-543d-436d-a50d-364cfadf8e50 [ 1174.590760] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1174.590760] env[61978]: value = "task-1395556" [ 1174.590760] env[61978]: _type = "Task" [ 1174.590760] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.603067] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395556, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.627535] env[61978]: INFO nova.virt.block_device [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Attempting to driver detach volume 861ed39b-543d-436d-a50d-364cfadf8e50 from mountpoint /dev/sdb [ 1174.627892] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Volume detach. Driver type: vmdk {{(pid=61978) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1174.628123] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295980', 'volume_id': '861ed39b-543d-436d-a50d-364cfadf8e50', 'name': 'volume-861ed39b-543d-436d-a50d-364cfadf8e50', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ae6b92bb-6f79-4b52-bdb7-095985bf2fad', 'attached_at': '', 'detached_at': '', 'volume_id': '861ed39b-543d-436d-a50d-364cfadf8e50', 'serial': '861ed39b-543d-436d-a50d-364cfadf8e50'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1174.629053] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9389b54-fb8b-4489-b9ef-58a918b4afed {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.651591] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2a16ee-e5ba-478b-a44e-340672cd5afc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.662591] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf29fb55-b50f-4395-9052-a482ffddbce0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.687767] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c89712f1-dc52-454a-afbf-bc2beeb722e2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.706090] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] The volume has not been displaced from its original location: [datastore2] volume-861ed39b-543d-436d-a50d-364cfadf8e50/volume-861ed39b-543d-436d-a50d-364cfadf8e50.vmdk. No consolidation needed. 
{{(pid=61978) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1174.711437] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Reconfiguring VM instance instance-00000046 to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1174.711794] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08889149-9370-454d-a089-a0cf52f7021b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.734740] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1174.734740] env[61978]: value = "task-1395557" [ 1174.734740] env[61978]: _type = "Task" [ 1174.734740] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.743708] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395557, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.807494] env[61978]: DEBUG nova.scheduler.client.report [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1174.872317] env[61978]: DEBUG oslo_vmware.api [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395553, 'name': PowerOnVM_Task, 'duration_secs': 0.491401} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.872642] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1174.872904] env[61978]: DEBUG nova.compute.manager [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1174.873769] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1708f0ba-6446-43b7-950b-0092c50ecf8f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.931834] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395554, 'name': Rename_Task, 'duration_secs': 0.170623} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.932238] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1174.932616] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd416f1f-214b-443c-9aae-0a54d920158c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.941078] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1174.941078] env[61978]: value = "task-1395558" [ 1174.941078] env[61978]: _type = "Task" [ 1174.941078] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.949902] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395558, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.985144] env[61978]: DEBUG nova.compute.manager [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1175.016070] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395555, 'name': Rename_Task, 'duration_secs': 0.166728} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.018242] env[61978]: DEBUG nova.virt.hardware [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1175.018484] env[61978]: DEBUG nova.virt.hardware [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1175.018644] env[61978]: DEBUG nova.virt.hardware [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1175.018831] env[61978]: DEBUG nova.virt.hardware [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1175.018982] env[61978]: DEBUG nova.virt.hardware [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1175.019154] env[61978]: DEBUG nova.virt.hardware [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1175.019373] env[61978]: DEBUG nova.virt.hardware [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1175.019544] env[61978]: DEBUG nova.virt.hardware [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1175.019704] env[61978]: DEBUG nova.virt.hardware [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1175.019873] env[61978]: DEBUG nova.virt.hardware [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1175.020061] env[61978]: DEBUG nova.virt.hardware [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1175.020374] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1175.021121] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-670ecd28-64e3-4108-94c6-31fc3f99a2ec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.023743] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3401bde9-4f5d-4bd1-ad7e-da8fa61e3d84 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.032708] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5443c51-72d4-426e-9280-f6215e5d631f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.036744] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1175.036744] env[61978]: value = "task-1395559" [ 1175.036744] env[61978]: _type = "Task" [ 1175.036744] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.055433] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395559, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.103943] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395556, 'name': PowerOffVM_Task, 'duration_secs': 0.273734} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.104567] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1175.105224] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e3ed2e2-fd58-4582-8685-455d9c500f14 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.125710] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd875a4d-e140-4b21-9dfb-89219bd15303 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.236352] env[61978]: DEBUG nova.compute.manager [req-797698fb-b78b-4d02-9924-01c071f1b7a6 req-81a04480-02bf-4cda-bd04-c7204a414f8a service nova] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Received event network-vif-plugged-52e402de-991f-4bee-bc93-9d0ac255d4b7 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1175.236578] env[61978]: DEBUG oslo_concurrency.lockutils [req-797698fb-b78b-4d02-9924-01c071f1b7a6 req-81a04480-02bf-4cda-bd04-c7204a414f8a service nova] Acquiring lock "5d1d19d8-241b-41b8-b1c0-caf54f8fd600-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.236790] env[61978]: DEBUG oslo_concurrency.lockutils [req-797698fb-b78b-4d02-9924-01c071f1b7a6 req-81a04480-02bf-4cda-bd04-c7204a414f8a service nova] Lock "5d1d19d8-241b-41b8-b1c0-caf54f8fd600-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1175.236956] env[61978]: DEBUG oslo_concurrency.lockutils [req-797698fb-b78b-4d02-9924-01c071f1b7a6 req-81a04480-02bf-4cda-bd04-c7204a414f8a service nova] Lock "5d1d19d8-241b-41b8-b1c0-caf54f8fd600-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1175.237432] env[61978]: DEBUG nova.compute.manager [req-797698fb-b78b-4d02-9924-01c071f1b7a6 req-81a04480-02bf-4cda-bd04-c7204a414f8a service nova] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] No waiting events found dispatching network-vif-plugged-52e402de-991f-4bee-bc93-9d0ac255d4b7 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1175.237619] env[61978]: WARNING nova.compute.manager [req-797698fb-b78b-4d02-9924-01c071f1b7a6 req-81a04480-02bf-4cda-bd04-c7204a414f8a service nova] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Received unexpected event network-vif-plugged-52e402de-991f-4bee-bc93-9d0ac255d4b7 for instance with vm_state building and task_state spawning. 
[ 1175.248266] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395557, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.312230] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.373s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1175.313432] env[61978]: DEBUG nova.compute.manager [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1175.315866] env[61978]: DEBUG oslo_concurrency.lockutils [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.228s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1175.317351] env[61978]: INFO nova.compute.claims [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1175.324019] env[61978]: DEBUG nova.network.neutron [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Successfully updated port: 52e402de-991f-4bee-bc93-9d0ac255d4b7 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1175.390728] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.452441] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395558, 'name': PowerOnVM_Task} progress is 87%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.548294] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395559, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.637397] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Creating Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1175.638225] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6c58bf24-37a0-4ec6-9f2d-eb73b84c8e43 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.647906] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1175.647906] env[61978]: value = "task-1395560" [ 1175.647906] env[61978]: _type = "Task" [ 1175.647906] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.657660] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395560, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.747950] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395557, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.821718] env[61978]: DEBUG nova.compute.utils [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1175.826565] env[61978]: DEBUG nova.compute.manager [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1175.826565] env[61978]: DEBUG nova.network.neutron [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1175.827520] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Acquiring lock "refresh_cache-5d1d19d8-241b-41b8-b1c0-caf54f8fd600" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1175.827854] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Acquired lock "refresh_cache-5d1d19d8-241b-41b8-b1c0-caf54f8fd600" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1175.827854] env[61978]: DEBUG nova.network.neutron [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1175.872782] env[61978]: DEBUG nova.policy [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '346f982562de48fab1702aca567113ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3a4f29a959447159b2f7d194ea94873', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1175.953559] env[61978]: DEBUG oslo_vmware.api [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395558, 'name': PowerOnVM_Task, 'duration_secs': 0.649591} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.953903] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1175.954167] env[61978]: INFO nova.compute.manager [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Took 10.08 seconds to spawn the instance on the hypervisor. 
[ 1175.954403] env[61978]: DEBUG nova.compute.manager [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1175.955299] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a46765-16d6-4303-9559-0e98a3dba5b5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.050575] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395559, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.094043] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "81f0b79c-97b3-4a5d-a8fc-7c2250571177" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1176.094445] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "81f0b79c-97b3-4a5d-a8fc-7c2250571177" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1176.094640] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "81f0b79c-97b3-4a5d-a8fc-7c2250571177-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1176.095315] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "81f0b79c-97b3-4a5d-a8fc-7c2250571177-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1176.095315] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "81f0b79c-97b3-4a5d-a8fc-7c2250571177-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1176.097260] env[61978]: INFO nova.compute.manager [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Terminating 
instance [ 1176.099074] env[61978]: DEBUG nova.compute.manager [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1176.099282] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1176.100111] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-911849d4-f5c5-4c68-9b8b-796cf18b9725 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.108867] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1176.109574] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8493ccb4-f162-4ae0-b156-ad7fbd4eae79 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.119462] env[61978]: DEBUG oslo_vmware.api [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1176.119462] env[61978]: value = "task-1395561" [ 1176.119462] env[61978]: _type = "Task" [ 1176.119462] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.129239] env[61978]: DEBUG oslo_vmware.api [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395561, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.160310] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395560, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.187052] env[61978]: DEBUG nova.network.neutron [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Successfully created port: 3c5e24a1-8ef7-45a5-a39a-4ce790adc338 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1176.251458] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395557, 'name': ReconfigVM_Task, 'duration_secs': 1.264161} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.252388] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Reconfigured VM instance instance-00000046 to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1176.258932] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f7b1bfe-811b-444b-b023-ee751c393a53 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.279264] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1176.279264] env[61978]: value = "task-1395562" [ 1176.279264] env[61978]: _type = "Task" [ 1176.279264] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.289614] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395562, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.325949] env[61978]: DEBUG nova.compute.manager [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1176.413407] env[61978]: DEBUG nova.network.neutron [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1176.478278] env[61978]: INFO nova.compute.manager [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Took 21.54 seconds to build instance. [ 1176.551037] env[61978]: DEBUG oslo_vmware.api [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395559, 'name': PowerOnVM_Task, 'duration_secs': 1.390014} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.551348] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1176.551591] env[61978]: INFO nova.compute.manager [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Took 8.25 seconds to spawn the instance on the hypervisor. [ 1176.551780] env[61978]: DEBUG nova.compute.manager [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1176.552593] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6385a6a9-e231-4141-b838-ce0293760c34 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.606868] env[61978]: DEBUG nova.network.neutron [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Updating instance_info_cache with network_info: [{"id": "52e402de-991f-4bee-bc93-9d0ac255d4b7", "address": "fa:16:3e:29:e3:c0", "network": {"id": "41ed084a-3f34-494c-8090-8012e0e761b4", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-2038123933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "379f498936ee4490995a99b66825b9ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b89fd3b-0470-40c9-bb5b-d52c76c030e4", "external-id": "nsx-vlan-transportzone-276", "segmentation_id": 276, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52e402de-99", "ovs_interfaceid": "52e402de-991f-4bee-bc93-9d0ac255d4b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.628220] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304c74cc-8a86-486b-9de3-b0c9f861dfe9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.643822] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e0b7526-a345-496c-931a-103489addce9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.648576] env[61978]: DEBUG 
oslo_vmware.api [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395561, 'name': PowerOffVM_Task, 'duration_secs': 0.194241} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.652715] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1176.652990] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1176.653461] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "68791dff-12e0-499d-8835-1e9173af570f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1176.653790] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "68791dff-12e0-499d-8835-1e9173af570f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1176.655537] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aafdea4f-31ee-4dc8-b813-160a2e2397d5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.706072] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d36332-01ce-467a-b988-7c9cc8355c62 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.713901] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395560, 'name': CreateSnapshot_Task, 'duration_secs': 0.794113} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.714774] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Created Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1176.715865] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6965d278-55d5-4413-8a9b-868ada4364d2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.725181] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b7ec48-f006-44cc-8145-eba370cadb00 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.753188] env[61978]: DEBUG nova.compute.provider_tree [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1176.790028] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395562, 'name': ReconfigVM_Task, 'duration_secs': 0.202738} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.790354] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295980', 'volume_id': '861ed39b-543d-436d-a50d-364cfadf8e50', 'name': 'volume-861ed39b-543d-436d-a50d-364cfadf8e50', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ae6b92bb-6f79-4b52-bdb7-095985bf2fad', 'attached_at': '', 'detached_at': '', 'volume_id': '861ed39b-543d-436d-a50d-364cfadf8e50', 'serial': '861ed39b-543d-436d-a50d-364cfadf8e50'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1176.896578] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1176.897056] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1176.897279] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 
tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Deleting the datastore file [datastore2] 81f0b79c-97b3-4a5d-a8fc-7c2250571177 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1176.897620] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eb709cf9-d9dd-4e6c-962e-3e3bfc6a12c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.904690] env[61978]: DEBUG oslo_vmware.api [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1176.904690] env[61978]: value = "task-1395564" [ 1176.904690] env[61978]: _type = "Task" [ 1176.904690] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.913129] env[61978]: DEBUG oslo_vmware.api [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395564, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.978327] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e6938125-54f6-44b6-9bc2-cdedfd64dc37 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "612aba6c-a30d-4eeb-8f85-e791bda55582" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.050s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.072171] env[61978]: INFO nova.compute.manager [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Took 15.60 seconds to build instance. 
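The entries above follow the usual oslo.vmware pattern: a task is submitted (ReconfigVM_Task, DeleteDatastoreFile_Task), the caller logs "Waiting for the task ... to complete", and _poll_task reports progress until the task "completed successfully". The snippet below is only a minimal sketch of that poll-until-done pattern, not the real oslo_vmware.api implementation (which also handles error translation and leases); the callable get_task_state and the return shape are hypothetical stand-ins.

# Illustrative sketch of the wait_for_task / _poll_task polling pattern seen
# in the log. get_task_state() is a hypothetical callable returning e.g.
# {'state': 'running', 'progress': 42} or {'state': 'success'}.
import time


class TaskTimedOut(Exception):
    """Raised when a task does not finish within the allotted time."""


def wait_for_task(get_task_state, interval=0.5, timeout=60.0):
    """Poll get_task_state() until it reports success or error."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_state()
        state = info.get('state')
        if state == 'success':
            return info
        if state == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # Corresponds to the "progress is N%" lines emitted while a task
        # is still queued or running.
        print("progress is %s%%" % info.get('progress', 0))
        time.sleep(interval)
    raise TaskTimedOut('task did not complete in %.1fs' % timeout)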
[ 1177.111644] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Releasing lock "refresh_cache-5d1d19d8-241b-41b8-b1c0-caf54f8fd600" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1177.112030] env[61978]: DEBUG nova.compute.manager [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Instance network_info: |[{"id": "52e402de-991f-4bee-bc93-9d0ac255d4b7", "address": "fa:16:3e:29:e3:c0", "network": {"id": "41ed084a-3f34-494c-8090-8012e0e761b4", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-2038123933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "379f498936ee4490995a99b66825b9ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b89fd3b-0470-40c9-bb5b-d52c76c030e4", "external-id": "nsx-vlan-transportzone-276", "segmentation_id": 276, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52e402de-99", "ovs_interfaceid": "52e402de-991f-4bee-bc93-9d0ac255d4b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1177.112433] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:e3:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b89fd3b-0470-40c9-bb5b-d52c76c030e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '52e402de-991f-4bee-bc93-9d0ac255d4b7', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1177.120150] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Creating folder: Project (379f498936ee4490995a99b66825b9ca). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1177.120732] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6f5157d6-db9c-4e78-b3c6-ff8b47e4f80e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.135030] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Created folder: Project (379f498936ee4490995a99b66825b9ca) in parent group-v295764. 
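The network_info blob logged for instance 5d1d19d8-241b-41b8-b1c0-caf54f8fd600 is a list of VIF dictionaries (port id, MAC, subnets, OVS details). A minimal sketch of pulling the commonly needed fields out of such a structure follows; the sample data is trimmed from the entry above, and the helper name summarize_vifs is illustrative rather than anything in Nova.

# Illustrative sketch: extracting MAC, fixed IPs, MTU and devname from a
# network_info structure like the one cached above (sample trimmed from the
# log entry).
sample_network_info = [{
    "id": "52e402de-991f-4bee-bc93-9d0ac255d4b7",
    "address": "fa:16:3e:29:e3:c0",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.13", "type": "fixed"}],
        }],
        "meta": {"mtu": 8950},
    },
    "devname": "tap52e402de-99",
}]


def summarize_vifs(network_info):
    """Return (mac, fixed_ips, mtu, devname) tuples for each VIF entry."""
    rows = []
    for vif in network_info:
        fixed_ips = [
            ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
            if ip.get("type") == "fixed"
        ]
        rows.append((vif["address"],
                     fixed_ips,
                     vif["network"]["meta"].get("mtu"),
                     vif.get("devname")))
    return rows


print(summarize_vifs(sample_network_info))
# [('fa:16:3e:29:e3:c0', ['192.168.128.13'], 8950, 'tap52e402de-99')]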
[ 1177.135262] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Creating folder: Instances. Parent ref: group-v295985. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1177.135524] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cc192ce0-7fae-4bc8-8c48-32d27f2cb345 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.148546] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Created folder: Instances in parent group-v295985. [ 1177.148825] env[61978]: DEBUG oslo.service.loopingcall [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1177.149053] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1177.149290] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d6dfc21e-805e-4b0a-96c8-710c600010ff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.169410] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1177.169410] env[61978]: value = "task-1395567" [ 1177.169410] env[61978]: _type = "Task" [ 1177.169410] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.177582] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395567, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.202443] env[61978]: DEBUG nova.compute.manager [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1177.248254] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Creating linked-clone VM from snapshot {{(pid=61978) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1177.248806] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4ac39feb-a2e2-4f89-bbad-f70f1b864652 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.256601] env[61978]: DEBUG nova.scheduler.client.report [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1177.267623] env[61978]: DEBUG nova.compute.manager [req-e257a161-3024-40dd-9fed-e645978f2f64 req-e75ae46c-ee59-4231-a165-bce2549a7b85 service nova] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Received event network-changed-52e402de-991f-4bee-bc93-9d0ac255d4b7 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1177.267623] env[61978]: DEBUG nova.compute.manager [req-e257a161-3024-40dd-9fed-e645978f2f64 req-e75ae46c-ee59-4231-a165-bce2549a7b85 service nova] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Refreshing instance network info cache due to event network-changed-52e402de-991f-4bee-bc93-9d0ac255d4b7. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1177.267745] env[61978]: DEBUG oslo_concurrency.lockutils [req-e257a161-3024-40dd-9fed-e645978f2f64 req-e75ae46c-ee59-4231-a165-bce2549a7b85 service nova] Acquiring lock "refresh_cache-5d1d19d8-241b-41b8-b1c0-caf54f8fd600" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1177.267886] env[61978]: DEBUG oslo_concurrency.lockutils [req-e257a161-3024-40dd-9fed-e645978f2f64 req-e75ae46c-ee59-4231-a165-bce2549a7b85 service nova] Acquired lock "refresh_cache-5d1d19d8-241b-41b8-b1c0-caf54f8fd600" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.268064] env[61978]: DEBUG nova.network.neutron [req-e257a161-3024-40dd-9fed-e645978f2f64 req-e75ae46c-ee59-4231-a165-bce2549a7b85 service nova] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Refreshing network info cache for port 52e402de-991f-4bee-bc93-9d0ac255d4b7 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1177.271331] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1177.271331] env[61978]: value = "task-1395568" [ 1177.271331] env[61978]: _type = "Task" [ 1177.271331] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.282140] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395568, 'name': CloneVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.343553] env[61978]: DEBUG nova.compute.manager [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1177.367611] env[61978]: DEBUG nova.virt.hardware [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1177.367894] env[61978]: DEBUG nova.virt.hardware [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1177.368209] env[61978]: DEBUG nova.virt.hardware [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1177.368477] env[61978]: DEBUG nova.virt.hardware [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1177.368748] env[61978]: DEBUG nova.virt.hardware [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1177.369041] env[61978]: DEBUG nova.virt.hardware [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1177.369359] env[61978]: DEBUG nova.virt.hardware [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1177.369641] env[61978]: DEBUG nova.virt.hardware [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1177.369886] env[61978]: DEBUG 
nova.virt.hardware [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1177.370199] env[61978]: DEBUG nova.virt.hardware [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1177.370527] env[61978]: DEBUG nova.virt.hardware [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1177.372932] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eecbdc90-19a8-48a1-b1fb-52b4429e5035 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.383480] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7529bf3-a7af-4c4c-9b44-00666a036635 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.416657] env[61978]: DEBUG oslo_vmware.api [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395564, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189237} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.416999] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1177.417267] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1177.417488] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1177.417714] env[61978]: INFO nova.compute.manager [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Took 1.32 seconds to destroy the instance on the hypervisor. 
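The nova.virt.hardware lines above walk through CPU topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits, the only candidate is 1 socket x 1 core x 1 thread. The sketch below enumerates candidate (sockets, cores, threads) factorizations in the same spirit; it is not Nova's _get_possible_cpu_topologies(), which also applies flavor/image preferences, but it reproduces the single candidate seen in the log for one vCPU.

# Illustrative sketch: enumerate CPU topologies whose sockets*cores*threads
# equals the vCPU count, under upper bounds (defaults mirror the 65536
# limits logged above).
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies


# For the 1-vCPU flavor above there is exactly one candidate:
print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]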
[ 1177.418037] env[61978]: DEBUG oslo.service.loopingcall [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1177.418559] env[61978]: DEBUG nova.compute.manager [-] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1177.418654] env[61978]: DEBUG nova.network.neutron [-] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1177.574332] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f58cd982-d6bd-448d-a6c4-d5ab220b7641 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.120s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.682670] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395567, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.743593] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.762853] env[61978]: DEBUG oslo_concurrency.lockutils [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.447s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.763455] env[61978]: DEBUG nova.compute.manager [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1177.766577] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.376s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.766817] env[61978]: DEBUG nova.objects.instance [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61978) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1177.788263] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395568, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.848226] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1177.848653] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd0597f7-2846-414e-8094-5ec1ac57cfe0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.858020] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1177.858020] env[61978]: value = "task-1395569" [ 1177.858020] env[61978]: _type = "Task" [ 1177.858020] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.871426] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] VM already powered off {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1177.871717] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Volume detach. 
Driver type: vmdk {{(pid=61978) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1177.872388] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295980', 'volume_id': '861ed39b-543d-436d-a50d-364cfadf8e50', 'name': 'volume-861ed39b-543d-436d-a50d-364cfadf8e50', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ae6b92bb-6f79-4b52-bdb7-095985bf2fad', 'attached_at': '', 'detached_at': '', 'volume_id': '861ed39b-543d-436d-a50d-364cfadf8e50', 'serial': '861ed39b-543d-436d-a50d-364cfadf8e50'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1177.872711] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b555a8-385f-4687-b4ac-7335bde52d6c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.899043] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e65d15-03dc-40a6-aa5e-12a57d3c2548 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.906852] env[61978]: WARNING nova.virt.vmwareapi.driver [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1177.907228] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1177.908049] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6425c97-ed67-455e-a755-f28bed945c44 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.917126] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1177.917126] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4b55fa6-71cb-4652-9af0-ede3e8cb5b93 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.999438] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1177.999873] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 
tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1178.000060] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Deleting the datastore file [datastore1] ae6b92bb-6f79-4b52-bdb7-095985bf2fad {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1178.000373] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43ca523e-66ea-47e7-8520-f81bb673be88 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.017356] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1178.017356] env[61978]: value = "task-1395571" [ 1178.017356] env[61978]: _type = "Task" [ 1178.017356] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.026485] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395571, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.086894] env[61978]: DEBUG nova.network.neutron [req-e257a161-3024-40dd-9fed-e645978f2f64 req-e75ae46c-ee59-4231-a165-bce2549a7b85 service nova] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Updated VIF entry in instance network info cache for port 52e402de-991f-4bee-bc93-9d0ac255d4b7. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1178.087316] env[61978]: DEBUG nova.network.neutron [req-e257a161-3024-40dd-9fed-e645978f2f64 req-e75ae46c-ee59-4231-a165-bce2549a7b85 service nova] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Updating instance_info_cache with network_info: [{"id": "52e402de-991f-4bee-bc93-9d0ac255d4b7", "address": "fa:16:3e:29:e3:c0", "network": {"id": "41ed084a-3f34-494c-8090-8012e0e761b4", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-2038123933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "379f498936ee4490995a99b66825b9ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b89fd3b-0470-40c9-bb5b-d52c76c030e4", "external-id": "nsx-vlan-transportzone-276", "segmentation_id": 276, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52e402de-99", "ovs_interfaceid": "52e402de-991f-4bee-bc93-9d0ac255d4b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.150192] env[61978]: DEBUG nova.compute.manager [req-3ec2105f-6afe-460f-9382-5007c7974905 req-c93fa8a6-c4b7-48e8-86c0-30a373b97099 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Received event network-vif-plugged-3c5e24a1-8ef7-45a5-a39a-4ce790adc338 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1178.150468] env[61978]: DEBUG oslo_concurrency.lockutils [req-3ec2105f-6afe-460f-9382-5007c7974905 req-c93fa8a6-c4b7-48e8-86c0-30a373b97099 service nova] Acquiring lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1178.150727] env[61978]: DEBUG oslo_concurrency.lockutils [req-3ec2105f-6afe-460f-9382-5007c7974905 req-c93fa8a6-c4b7-48e8-86c0-30a373b97099 service nova] Lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1178.150956] env[61978]: DEBUG oslo_concurrency.lockutils [req-3ec2105f-6afe-460f-9382-5007c7974905 req-c93fa8a6-c4b7-48e8-86c0-30a373b97099 service nova] Lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1178.151969] env[61978]: DEBUG nova.compute.manager [req-3ec2105f-6afe-460f-9382-5007c7974905 req-c93fa8a6-c4b7-48e8-86c0-30a373b97099 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] No waiting events found dispatching network-vif-plugged-3c5e24a1-8ef7-45a5-a39a-4ce790adc338 {{(pid=61978) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1178.153153] env[61978]: WARNING nova.compute.manager [req-3ec2105f-6afe-460f-9382-5007c7974905 req-c93fa8a6-c4b7-48e8-86c0-30a373b97099 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Received unexpected event network-vif-plugged-3c5e24a1-8ef7-45a5-a39a-4ce790adc338 for instance with vm_state building and task_state spawning. [ 1178.174218] env[61978]: DEBUG nova.compute.manager [req-f273500d-88a5-4b5a-8829-4278847160f8 req-4f5ba54c-0cc4-4043-8d64-1393e94f8bfe service nova] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Received event network-vif-deleted-d5e0a55d-dd67-40cf-ad0c-76910a2013aa {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1178.174218] env[61978]: INFO nova.compute.manager [req-f273500d-88a5-4b5a-8829-4278847160f8 req-4f5ba54c-0cc4-4043-8d64-1393e94f8bfe service nova] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Neutron deleted interface d5e0a55d-dd67-40cf-ad0c-76910a2013aa; detaching it from the instance and deleting it from the info cache [ 1178.174809] env[61978]: DEBUG nova.network.neutron [req-f273500d-88a5-4b5a-8829-4278847160f8 req-4f5ba54c-0cc4-4043-8d64-1393e94f8bfe service nova] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.189635] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395567, 'name': CreateVM_Task, 'duration_secs': 0.624943} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.189847] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1178.190395] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1178.190581] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.190942] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1178.191437] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1da24402-6b57-4499-b36c-18b25e1877c2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.197582] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa 
tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Waiting for the task: (returnval){ [ 1178.197582] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5205f7f0-a95b-7609-3055-dbb58d5a530d" [ 1178.197582] env[61978]: _type = "Task" [ 1178.197582] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.206664] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5205f7f0-a95b-7609-3055-dbb58d5a530d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.276919] env[61978]: DEBUG nova.compute.utils [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1178.279198] env[61978]: DEBUG nova.compute.manager [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1178.279606] env[61978]: DEBUG nova.compute.manager [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1178.279770] env[61978]: DEBUG nova.network.neutron [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1178.287463] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6f14b1-d30c-4aff-92f7-8d4f901f2fe1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.298641] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395568, 'name': CloneVM_Task} progress is 95%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.349328] env[61978]: DEBUG nova.network.neutron [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Successfully updated port: 3c5e24a1-8ef7-45a5-a39a-4ce790adc338 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1178.357565] env[61978]: DEBUG nova.policy [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9580f2ba2f244d8c9950bbe509c7c9ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d4d29d9b6a74b4887684c7b310280b7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1178.529489] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395571, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159511} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.529846] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1178.530093] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1178.530357] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1178.571824] env[61978]: DEBUG nova.network.neutron [-] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.590239] env[61978]: DEBUG oslo_concurrency.lockutils [req-e257a161-3024-40dd-9fed-e645978f2f64 req-e75ae46c-ee59-4231-a165-bce2549a7b85 service nova] Releasing lock "refresh_cache-5d1d19d8-241b-41b8-b1c0-caf54f8fd600" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1178.629840] env[61978]: DEBUG nova.network.neutron [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 
c0be687a-7444-4019-8b12-dac41a7c080e] Successfully created port: 52cdfaa1-00dd-4eed-94aa-d186999d0614 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1178.679243] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-79a2aafd-6376-4312-b89a-2b0733198534 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.689098] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c5a0a8c-4af9-431e-a70f-0dda821e95e6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.709386] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5205f7f0-a95b-7609-3055-dbb58d5a530d, 'name': SearchDatastore_Task, 'duration_secs': 0.010743} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.709969] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1178.710063] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1178.712225] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1178.712225] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.712225] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1178.712225] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3815e09f-a031-49b3-b7c5-42be685d94ff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1178.729715] env[61978]: DEBUG nova.compute.manager [req-f273500d-88a5-4b5a-8829-4278847160f8 req-4f5ba54c-0cc4-4043-8d64-1393e94f8bfe service nova] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Detach interface failed, port_id=d5e0a55d-dd67-40cf-ad0c-76910a2013aa, reason: Instance 81f0b79c-97b3-4a5d-a8fc-7c2250571177 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1178.731463] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1178.732178] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1178.733031] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc543034-e785-4273-a70c-d35c04cd0826 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.739952] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Waiting for the task: (returnval){ [ 1178.739952] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cdbfcc-7c55-0c16-ce1e-9f3271240a1d" [ 1178.739952] env[61978]: _type = "Task" [ 1178.739952] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.748541] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cdbfcc-7c55-0c16-ce1e-9f3271240a1d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.782021] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bdad437f-a6af-492b-b75e-990ea8130da0 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1178.782021] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.038s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1178.784073] env[61978]: INFO nova.compute.claims [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1178.790697] env[61978]: DEBUG nova.compute.manager [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1178.801234] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395568, 'name': CloneVM_Task, 'duration_secs': 1.258428} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.801659] env[61978]: INFO nova.virt.vmwareapi.vmops [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Created linked-clone VM from snapshot [ 1178.802582] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c410e60-ce64-4cc2-9d62-134a69c190ec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.806159] env[61978]: INFO nova.compute.manager [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] instance snapshotting [ 1178.809251] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da373cf4-ef07-4f06-a33c-e203b4fddcc2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.820513] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Uploading image 443a8916-4f98-4cb9-9e27-49dd792e901d {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1178.838930] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e07fcded-2b6a-4bc3-a951-183f94c8ce45 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.854516] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1178.854516] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.854516] env[61978]: DEBUG nova.network.neutron [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1178.863581] env[61978]: DEBUG oslo_vmware.rw_handles [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1178.863581] env[61978]: value = "vm-295988" [ 1178.863581] env[61978]: _type = "VirtualMachine" [ 1178.863581] env[61978]: }. 
{{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1178.863881] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4d2e2653-0876-4ee3-81f6-e74d91684c0e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.872870] env[61978]: DEBUG oslo_vmware.rw_handles [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lease: (returnval){ [ 1178.872870] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5217ce22-420f-6e35-85a6-33dac676f6da" [ 1178.872870] env[61978]: _type = "HttpNfcLease" [ 1178.872870] env[61978]: } obtained for exporting VM: (result){ [ 1178.872870] env[61978]: value = "vm-295988" [ 1178.872870] env[61978]: _type = "VirtualMachine" [ 1178.872870] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1178.873219] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the lease: (returnval){ [ 1178.873219] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5217ce22-420f-6e35-85a6-33dac676f6da" [ 1178.873219] env[61978]: _type = "HttpNfcLease" [ 1178.873219] env[61978]: } to be ready. {{(pid=61978) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1178.880908] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1178.880908] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5217ce22-420f-6e35-85a6-33dac676f6da" [ 1178.880908] env[61978]: _type = "HttpNfcLease" [ 1178.880908] env[61978]: } is initializing. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1179.025636] env[61978]: DEBUG nova.compute.manager [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Stashing vm_state: active {{(pid=61978) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1179.035819] env[61978]: INFO nova.virt.block_device [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Booting with volume 861ed39b-543d-436d-a50d-364cfadf8e50 at /dev/sdb [ 1179.073624] env[61978]: INFO nova.compute.manager [-] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Took 1.65 seconds to deallocate network for instance. 
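The repeated "Invoking <object>.<Method>_Task ... opID=oslo.vmware-..." and "Task: {...} progress is N%" entries above (SearchDatastore_Task, CloneVM_Task, CreateSnapshot_Task, and so on) all follow the same oslo.vmware invoke-and-wait pattern: the SOAP call returns a Task managed-object reference immediately, and the API session then polls TaskInfo until it reaches a terminal state. The sketch below shows that pattern in isolation; the vCenter host, the credentials, and the reuse of the "vm-295988" value seen above are placeholders taken from this log, not a working configuration.

# Sketch of the invoke-and-wait pattern behind the "Invoking ..._Task" and
# "progress is N%" lines above. Host, credentials and the moref value are placeholders.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc1.example.test',            # vCenter host (placeholder)
    'nova-user', 'secret',         # credentials (placeholders)
    api_retry_count=10,
    task_poll_interval=0.5)        # cadence of the "_poll_task ... progress" lines

# Build a bare managed-object reference for the VM (value copied from the log).
vm_ref = vim_util.get_moref('vm-295988', 'VirtualMachine')

# invoke_api() issues the SOAP request (the "Invoking VirtualMachine.CreateSnapshot_Task"
# entries) and returns a Task moref without waiting for completion.
task = session.invoke_api(session.vim, 'CreateSnapshot_Task', vm_ref,
                          name='nova-snapshot', description='',
                          memory=False, quiesce=False)

# wait_for_task() polls TaskInfo on each task_poll_interval (the repeated
# "progress is N%" entries) and returns the final TaskInfo, raising if the
# task ends in an error state.
task_info = session.wait_for_task(task)
print(task_info.state)   # 'success' once the task completes

Nova's vmwareapi driver goes through this same helper for every long-running vCenter operation in this log, which is why each completed task is reported with a duration_secs field once polling observes success.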
[ 1179.073957] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-369bc49b-e284-45d8-a955-47ab1d90e80e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.092249] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eed4e57-1f75-41b8-ab95-d73bf93feb93 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.127758] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-500220f2-c25a-4ea9-b896-051279fc4fa1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.137299] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fba5230-9365-460d-b9c8-42c7b3805ed0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.175256] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde08c87-1e9d-41a2-bd3a-96fc61194f8f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.183383] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e274ce-360b-4b4c-98c1-da852c9b247b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.199849] env[61978]: DEBUG nova.virt.block_device [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Updating existing volume attachment record: f7498c52-cf52-470e-b4d9-9996d6ec6c99 {{(pid=61978) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1179.252186] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cdbfcc-7c55-0c16-ce1e-9f3271240a1d, 'name': SearchDatastore_Task, 'duration_secs': 0.011564} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.252186] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-500d1b6c-c30f-4df7-b44f-3c52bc126967 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.258246] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Waiting for the task: (returnval){ [ 1179.258246] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ea4e94-a628-0ba5-2eed-2fd5424dc2eb" [ 1179.258246] env[61978]: _type = "Task" [ 1179.258246] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.270240] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ea4e94-a628-0ba5-2eed-2fd5424dc2eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.354730] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Creating Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1179.354730] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a4ecedad-e8b4-4e5a-8273-ac129f85cb1c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.366978] env[61978]: DEBUG oslo_vmware.api [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1179.366978] env[61978]: value = "task-1395573" [ 1179.366978] env[61978]: _type = "Task" [ 1179.366978] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.377779] env[61978]: DEBUG nova.compute.manager [req-00cf50f5-f03b-4ead-84c6-f2a7485b92d0 req-a50ef6a3-e247-4058-8375-c7817660f7b3 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Received event network-changed-3c5e24a1-8ef7-45a5-a39a-4ce790adc338 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1179.378197] env[61978]: DEBUG nova.compute.manager [req-00cf50f5-f03b-4ead-84c6-f2a7485b92d0 req-a50ef6a3-e247-4058-8375-c7817660f7b3 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Refreshing instance network info cache due to event network-changed-3c5e24a1-8ef7-45a5-a39a-4ce790adc338. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1179.378408] env[61978]: DEBUG oslo_concurrency.lockutils [req-00cf50f5-f03b-4ead-84c6-f2a7485b92d0 req-a50ef6a3-e247-4058-8375-c7817660f7b3 service nova] Acquiring lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1179.388374] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1179.388374] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5217ce22-420f-6e35-85a6-33dac676f6da" [ 1179.388374] env[61978]: _type = "HttpNfcLease" [ 1179.388374] env[61978]: } is ready. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1179.392085] env[61978]: DEBUG oslo_vmware.rw_handles [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1179.392085] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5217ce22-420f-6e35-85a6-33dac676f6da" [ 1179.392085] env[61978]: _type = "HttpNfcLease" [ 1179.392085] env[61978]: }. 
{{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1179.392528] env[61978]: DEBUG oslo_vmware.api [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395573, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.393755] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b321e0-48d5-4845-8d88-95e3a945bca4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.397657] env[61978]: DEBUG nova.network.neutron [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1179.406062] env[61978]: DEBUG oslo_vmware.rw_handles [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5257e5b6-e864-72be-3481-4ceb7685e5a0/disk-0.vmdk from lease info. {{(pid=61978) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1179.406181] env[61978]: DEBUG oslo_vmware.rw_handles [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5257e5b6-e864-72be-3481-4ceb7685e5a0/disk-0.vmdk for reading. 
{{(pid=61978) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1179.508670] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-60a7841f-e75e-4295-a5ff-a230ae6e8366 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.548779] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.584277] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.611925] env[61978]: DEBUG nova.network.neutron [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Updating instance_info_cache with network_info: [{"id": "3c5e24a1-8ef7-45a5-a39a-4ce790adc338", "address": "fa:16:3e:14:c6:36", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c5e24a1-8e", "ovs_interfaceid": "3c5e24a1-8ef7-45a5-a39a-4ce790adc338", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1179.770859] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ea4e94-a628-0ba5-2eed-2fd5424dc2eb, 'name': SearchDatastore_Task, 'duration_secs': 0.010571} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.771208] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1179.771776] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 5d1d19d8-241b-41b8-b1c0-caf54f8fd600/5d1d19d8-241b-41b8-b1c0-caf54f8fd600.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1179.771919] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8862e260-fbb7-4d15-88f6-870d54658f8e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.780347] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Waiting for the task: (returnval){ [ 1179.780347] env[61978]: value = "task-1395574" [ 1179.780347] env[61978]: _type = "Task" [ 1179.780347] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.790387] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': task-1395574, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.802126] env[61978]: DEBUG nova.compute.manager [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1179.836230] env[61978]: DEBUG nova.virt.hardware [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1179.836626] env[61978]: DEBUG nova.virt.hardware [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1179.836929] env[61978]: DEBUG nova.virt.hardware [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1179.837285] env[61978]: DEBUG nova.virt.hardware [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1179.837804] env[61978]: DEBUG nova.virt.hardware [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1179.838043] env[61978]: DEBUG nova.virt.hardware [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1179.838289] env[61978]: DEBUG nova.virt.hardware [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1179.838566] env[61978]: DEBUG nova.virt.hardware [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1179.838768] env[61978]: DEBUG nova.virt.hardware [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1179.839061] env[61978]: DEBUG nova.virt.hardware [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1179.839283] env[61978]: DEBUG nova.virt.hardware [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1179.840266] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04da2789-52ea-456a-8493-1ee305c631db {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.853417] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d388f13-3f82-4f96-8ae2-5c5f403bb027 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.886099] env[61978]: DEBUG oslo_vmware.api [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395573, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.114768] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1180.115816] env[61978]: DEBUG nova.compute.manager [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Instance network_info: |[{"id": "3c5e24a1-8ef7-45a5-a39a-4ce790adc338", "address": "fa:16:3e:14:c6:36", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c5e24a1-8e", "ovs_interfaceid": "3c5e24a1-8ef7-45a5-a39a-4ce790adc338", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1180.115816] env[61978]: DEBUG oslo_concurrency.lockutils [req-00cf50f5-f03b-4ead-84c6-f2a7485b92d0 req-a50ef6a3-e247-4058-8375-c7817660f7b3 service nova] Acquired lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.115816] env[61978]: DEBUG nova.network.neutron [req-00cf50f5-f03b-4ead-84c6-f2a7485b92d0 req-a50ef6a3-e247-4058-8375-c7817660f7b3 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Refreshing network info cache for port 3c5e24a1-8ef7-45a5-a39a-4ce790adc338 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1180.117450] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:c6:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3c5e24a1-8ef7-45a5-a39a-4ce790adc338', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1180.132336] env[61978]: DEBUG oslo.service.loopingcall [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 
tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1180.140187] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1180.140187] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7ee0a7ea-5efa-4453-b2c1-76d435a79f3e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.162413] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dba8749-458a-4488-a2e1-55d05bf9d5f3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.175017] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07462fec-be84-46aa-88d5-599f32f69b3f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.181218] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1180.181218] env[61978]: value = "task-1395575" [ 1180.181218] env[61978]: _type = "Task" [ 1180.181218] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.189711] env[61978]: DEBUG nova.compute.manager [req-cc5349c3-ffd4-45cc-91e4-d9701ce88126 req-291c7638-03c6-4e88-bff1-fd511105fe24 service nova] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Received event network-vif-plugged-52cdfaa1-00dd-4eed-94aa-d186999d0614 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1180.189923] env[61978]: DEBUG oslo_concurrency.lockutils [req-cc5349c3-ffd4-45cc-91e4-d9701ce88126 req-291c7638-03c6-4e88-bff1-fd511105fe24 service nova] Acquiring lock "c0be687a-7444-4019-8b12-dac41a7c080e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1180.190159] env[61978]: DEBUG oslo_concurrency.lockutils [req-cc5349c3-ffd4-45cc-91e4-d9701ce88126 req-291c7638-03c6-4e88-bff1-fd511105fe24 service nova] Lock "c0be687a-7444-4019-8b12-dac41a7c080e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.190440] env[61978]: DEBUG oslo_concurrency.lockutils [req-cc5349c3-ffd4-45cc-91e4-d9701ce88126 req-291c7638-03c6-4e88-bff1-fd511105fe24 service nova] Lock "c0be687a-7444-4019-8b12-dac41a7c080e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.190860] env[61978]: DEBUG nova.compute.manager [req-cc5349c3-ffd4-45cc-91e4-d9701ce88126 req-291c7638-03c6-4e88-bff1-fd511105fe24 service nova] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] No waiting events found dispatching network-vif-plugged-52cdfaa1-00dd-4eed-94aa-d186999d0614 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1180.190860] env[61978]: 
WARNING nova.compute.manager [req-cc5349c3-ffd4-45cc-91e4-d9701ce88126 req-291c7638-03c6-4e88-bff1-fd511105fe24 service nova] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Received unexpected event network-vif-plugged-52cdfaa1-00dd-4eed-94aa-d186999d0614 for instance with vm_state building and task_state spawning. [ 1180.219224] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6941bc06-bf4a-40d9-a956-e1dfc38ebfa7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.226883] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395575, 'name': CreateVM_Task} progress is 15%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.235688] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d177f368-52de-48c4-8c70-1b6981f15a5a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.258857] env[61978]: DEBUG nova.compute.provider_tree [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1180.293949] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': task-1395574, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.384810] env[61978]: DEBUG oslo_vmware.api [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395573, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.633995] env[61978]: DEBUG nova.network.neutron [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Successfully updated port: 52cdfaa1-00dd-4eed-94aa-d186999d0614 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1180.692911] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395575, 'name': CreateVM_Task, 'duration_secs': 0.444094} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.693104] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1180.693874] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1180.694063] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.694629] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1180.694887] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffbaecf7-00b7-49ab-b8fb-fcf41b6dcb7f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.700918] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1180.700918] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52754dff-040f-079a-c36c-dc57b6174860" [ 1180.700918] env[61978]: _type = "Task" [ 1180.700918] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.710134] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52754dff-040f-079a-c36c-dc57b6174860, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.762554] env[61978]: DEBUG nova.scheduler.client.report [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1180.791861] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': task-1395574, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524623} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.792151] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 5d1d19d8-241b-41b8-b1c0-caf54f8fd600/5d1d19d8-241b-41b8-b1c0-caf54f8fd600.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1180.792413] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1180.792762] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4571157d-7dea-48d4-afbd-5a4edc7175bf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.801592] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Waiting for the task: (returnval){ [ 1180.801592] env[61978]: value = "task-1395576" [ 1180.801592] env[61978]: _type = "Task" [ 1180.801592] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.812841] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': task-1395576, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.871301] env[61978]: DEBUG nova.network.neutron [req-00cf50f5-f03b-4ead-84c6-f2a7485b92d0 req-a50ef6a3-e247-4058-8375-c7817660f7b3 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Updated VIF entry in instance network info cache for port 3c5e24a1-8ef7-45a5-a39a-4ce790adc338. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1180.871695] env[61978]: DEBUG nova.network.neutron [req-00cf50f5-f03b-4ead-84c6-f2a7485b92d0 req-a50ef6a3-e247-4058-8375-c7817660f7b3 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Updating instance_info_cache with network_info: [{"id": "3c5e24a1-8ef7-45a5-a39a-4ce790adc338", "address": "fa:16:3e:14:c6:36", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c5e24a1-8e", "ovs_interfaceid": "3c5e24a1-8ef7-45a5-a39a-4ce790adc338", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.885184] env[61978]: DEBUG oslo_vmware.api [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395573, 'name': CreateSnapshot_Task, 'duration_secs': 1.033616} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.885515] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Created Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1180.886337] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3596e8-5923-4e2f-b1a3-25e58ff918a5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.120882] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "97e128f9-7135-46b0-b22a-ee5449ba48b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1181.121142] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "97e128f9-7135-46b0-b22a-ee5449ba48b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1181.139039] env[61978]: DEBUG oslo_concurrency.lockutils [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "refresh_cache-c0be687a-7444-4019-8b12-dac41a7c080e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1181.139197] env[61978]: DEBUG oslo_concurrency.lockutils [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquired lock "refresh_cache-c0be687a-7444-4019-8b12-dac41a7c080e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.139346] env[61978]: DEBUG nova.network.neutron [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1181.213993] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52754dff-040f-079a-c36c-dc57b6174860, 'name': SearchDatastore_Task, 'duration_secs': 0.011152} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.214500] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1181.214851] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1181.215187] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1181.215416] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.215641] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1181.215922] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-951564b5-4f5c-4838-9f99-89d93b33ea32 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.225839] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1181.226158] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1181.226901] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8816445d-1717-4a6f-813f-2e082f7ca279 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.233097] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1181.233097] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]522a4235-65cf-a92d-63ad-08b0069b4c0b" [ 1181.233097] env[61978]: _type = "Task" [ 1181.233097] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.243133] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522a4235-65cf-a92d-63ad-08b0069b4c0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.270078] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.488s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1181.270703] env[61978]: DEBUG nova.compute.manager [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1181.273682] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.725s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1181.313074] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': task-1395576, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.215656} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.313533] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1181.314510] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4ec2a8-94be-4fbf-a671-27d99ccf1932 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.341612] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] 5d1d19d8-241b-41b8-b1c0-caf54f8fd600/5d1d19d8-241b-41b8-b1c0-caf54f8fd600.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1181.343266] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10452305-def8-4439-a7f7-9c3d24b9f0ee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.372796] env[61978]: DEBUG nova.virt.hardware [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1181.376289] env[61978]: DEBUG nova.virt.hardware [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1181.376289] env[61978]: DEBUG nova.virt.hardware [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1181.376289] env[61978]: DEBUG nova.virt.hardware [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 
1181.376289] env[61978]: DEBUG nova.virt.hardware [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1181.376289] env[61978]: DEBUG nova.virt.hardware [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1181.376289] env[61978]: DEBUG nova.virt.hardware [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1181.376289] env[61978]: DEBUG nova.virt.hardware [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1181.376289] env[61978]: DEBUG nova.virt.hardware [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1181.376572] env[61978]: DEBUG nova.virt.hardware [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1181.376756] env[61978]: DEBUG nova.virt.hardware [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1181.378909] env[61978]: DEBUG oslo_concurrency.lockutils [req-00cf50f5-f03b-4ead-84c6-f2a7485b92d0 req-a50ef6a3-e247-4058-8375-c7817660f7b3 service nova] Releasing lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1181.380053] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76bc732-ca17-4203-87bf-dd65b12c5c3a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.383303] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Waiting for the task: (returnval){ [ 1181.383303] env[61978]: value = "task-1395577" [ 1181.383303] env[61978]: _type = "Task" [ 1181.383303] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.391591] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fef91e8-d532-4ef3-9c3b-953847c673ae {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.406803] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Creating linked-clone VM from snapshot {{(pid=61978) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1181.407790] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': task-1395577, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.410065] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b8c01786-afab-4218-aa6d-beafc01a0e7e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.414074] env[61978]: DEBUG nova.compute.manager [req-2ae329a7-721e-4011-9196-c15f8dd31aa5 req-729e81aa-c160-4ced-88e6-51f37a31853b service nova] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Received event network-changed-52cdfaa1-00dd-4eed-94aa-d186999d0614 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1181.414442] env[61978]: DEBUG nova.compute.manager [req-2ae329a7-721e-4011-9196-c15f8dd31aa5 req-729e81aa-c160-4ced-88e6-51f37a31853b service nova] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Refreshing instance network info cache due to event network-changed-52cdfaa1-00dd-4eed-94aa-d186999d0614. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1181.414817] env[61978]: DEBUG oslo_concurrency.lockutils [req-2ae329a7-721e-4011-9196-c15f8dd31aa5 req-729e81aa-c160-4ced-88e6-51f37a31853b service nova] Acquiring lock "refresh_cache-c0be687a-7444-4019-8b12-dac41a7c080e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1181.424349] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:e7:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '28538b34-2ffa-4e6e-a451-0654e6ec063d', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1181.432166] env[61978]: DEBUG oslo.service.loopingcall [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1181.433179] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1181.433525] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b332453-4e71-4396-8cd2-96a29343dffb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.450716] env[61978]: DEBUG oslo_vmware.api [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1181.450716] env[61978]: value = "task-1395578" [ 1181.450716] env[61978]: _type = "Task" [ 1181.450716] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.457659] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1181.457659] env[61978]: value = "task-1395579" [ 1181.457659] env[61978]: _type = "Task" [ 1181.457659] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.463775] env[61978]: DEBUG oslo_vmware.api [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395578, 'name': CloneVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.470407] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395579, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.615762] env[61978]: DEBUG oslo_concurrency.lockutils [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1181.616170] env[61978]: DEBUG oslo_concurrency.lockutils [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1181.623734] env[61978]: DEBUG nova.compute.manager [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1181.680373] env[61978]: DEBUG nova.network.neutron [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1181.744600] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522a4235-65cf-a92d-63ad-08b0069b4c0b, 'name': SearchDatastore_Task, 'duration_secs': 0.009645} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.748969] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e75903f6-1115-483e-9132-768f3e2dab5d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.758066] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1181.758066] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]520de141-9408-1f3a-4d0c-3355f13f2e84" [ 1181.758066] env[61978]: _type = "Task" [ 1181.758066] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.769320] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520de141-9408-1f3a-4d0c-3355f13f2e84, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.776966] env[61978]: DEBUG nova.compute.utils [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1181.778589] env[61978]: DEBUG nova.compute.manager [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1181.778814] env[61978]: DEBUG nova.network.neutron [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1181.784788] env[61978]: INFO nova.compute.claims [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1181.859695] env[61978]: DEBUG nova.policy [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3f20b272502341bd80be470f98554d1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d95ebcafdca43b8a1636e21c7258803', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1181.897374] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': task-1395577, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.958532] env[61978]: DEBUG nova.network.neutron [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Updating instance_info_cache with network_info: [{"id": "52cdfaa1-00dd-4eed-94aa-d186999d0614", "address": "fa:16:3e:db:2a:96", "network": {"id": "61175c4b-0b5d-4186-97ac-7a615b95ef28", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-341600036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d4d29d9b6a74b4887684c7b310280b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52cdfaa1-00", "ovs_interfaceid": "52cdfaa1-00dd-4eed-94aa-d186999d0614", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.967627] env[61978]: DEBUG oslo_vmware.api [None 
req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395578, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.973164] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395579, 'name': CreateVM_Task, 'duration_secs': 0.366808} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.973983] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1181.974815] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1181.975065] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.975667] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1181.976249] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f68563b1-208a-40c2-83a4-d4e51665995e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.983614] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1181.983614] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f6bb4f-9df3-dcc2-a210-8f993d364202" [ 1181.983614] env[61978]: _type = "Task" [ 1181.983614] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.993826] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f6bb4f-9df3-dcc2-a210-8f993d364202, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.123082] env[61978]: DEBUG nova.compute.utils [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1182.155281] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.183170] env[61978]: DEBUG nova.network.neutron [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Successfully created port: 73fe675b-ef9e-44db-a9d2-13d68f04aacb {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1182.270373] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520de141-9408-1f3a-4d0c-3355f13f2e84, 'name': SearchDatastore_Task, 'duration_secs': 0.01188} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.270742] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1182.271045] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9/758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1182.271381] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ae1ec04e-f5c6-4fdd-935e-9b9269d43c07 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.280582] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1182.280582] env[61978]: value = "task-1395580" [ 1182.280582] env[61978]: _type = "Task" [ 1182.280582] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.290611] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395580, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.292288] env[61978]: DEBUG nova.compute.manager [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1182.296749] env[61978]: INFO nova.compute.resource_tracker [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Updating resource usage from migration baada567-1832-4740-9bdd-43a3e81f2aa6 [ 1182.399353] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': task-1395577, 'name': ReconfigVM_Task, 'duration_secs': 0.589878} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.402937] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Reconfigured VM instance instance-00000053 to attach disk [datastore2] 5d1d19d8-241b-41b8-b1c0-caf54f8fd600/5d1d19d8-241b-41b8-b1c0-caf54f8fd600.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1182.406186] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bbbf7dc4-7269-49e8-bd05-89bd05720b61 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.415092] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Waiting for the task: (returnval){ [ 1182.415092] env[61978]: value = "task-1395581" [ 1182.415092] env[61978]: _type = "Task" [ 1182.415092] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.432663] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': task-1395581, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.465721] env[61978]: DEBUG oslo_vmware.api [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395578, 'name': CloneVM_Task} progress is 95%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.469159] env[61978]: DEBUG oslo_concurrency.lockutils [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Releasing lock "refresh_cache-c0be687a-7444-4019-8b12-dac41a7c080e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1182.469466] env[61978]: DEBUG nova.compute.manager [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Instance network_info: |[{"id": "52cdfaa1-00dd-4eed-94aa-d186999d0614", "address": "fa:16:3e:db:2a:96", "network": {"id": "61175c4b-0b5d-4186-97ac-7a615b95ef28", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-341600036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d4d29d9b6a74b4887684c7b310280b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52cdfaa1-00", "ovs_interfaceid": "52cdfaa1-00dd-4eed-94aa-d186999d0614", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1182.470159] env[61978]: DEBUG oslo_concurrency.lockutils [req-2ae329a7-721e-4011-9196-c15f8dd31aa5 req-729e81aa-c160-4ced-88e6-51f37a31853b service nova] Acquired lock "refresh_cache-c0be687a-7444-4019-8b12-dac41a7c080e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.470493] env[61978]: DEBUG nova.network.neutron [req-2ae329a7-721e-4011-9196-c15f8dd31aa5 req-729e81aa-c160-4ced-88e6-51f37a31853b service nova] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Refreshing network info cache for port 52cdfaa1-00dd-4eed-94aa-d186999d0614 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1182.471754] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:2a:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd5970ab5-34b8-4065-bfa6-f568b8f103b7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '52cdfaa1-00dd-4eed-94aa-d186999d0614', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1182.479963] env[61978]: DEBUG oslo.service.loopingcall [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f 
tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1182.480596] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1182.481643] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4e691be-a0bf-43e1-8370-3bc252a33c1e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.518075] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f6bb4f-9df3-dcc2-a210-8f993d364202, 'name': SearchDatastore_Task, 'duration_secs': 0.010659} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.522325] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1182.522606] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1182.522850] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1182.523064] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.523295] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1182.523599] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1182.523599] env[61978]: value = "task-1395582" [ 1182.523599] env[61978]: _type = "Task" [ 1182.523599] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.524229] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07288379-574a-40bd-a192-83181fa396b0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.551933] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395582, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.560500] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1182.561376] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1182.561482] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c78a954-b7a1-4e38-ae98-81cce3b34f7a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.571759] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1182.571759] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52308b9c-60bc-012a-d715-f4608c6daa45" [ 1182.571759] env[61978]: _type = "Task" [ 1182.571759] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.586989] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52308b9c-60bc-012a-d715-f4608c6daa45, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.626396] env[61978]: DEBUG oslo_concurrency.lockutils [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1182.671349] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feefeb37-a5a7-4cef-87c8-fb4542f5809e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.682396] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b614f82-153c-4c53-8189-9cd4f97ce16f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.725026] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd11e88-0f58-4ad3-94ad-573e0ec603fb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.746943] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee7e3fd-c73f-4bb9-b8ff-9a594a7d1efb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.774483] env[61978]: DEBUG nova.compute.provider_tree [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1182.792616] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395580, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.931349] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': task-1395581, 'name': Rename_Task, 'duration_secs': 0.373543} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.931349] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1182.931349] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fabf7233-a46d-4577-b388-c8c351d8823d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.939019] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Waiting for the task: (returnval){ [ 1182.939019] env[61978]: value = "task-1395583" [ 1182.939019] env[61978]: _type = "Task" [ 1182.939019] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.946907] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': task-1395583, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.964036] env[61978]: DEBUG oslo_vmware.api [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395578, 'name': CloneVM_Task, 'duration_secs': 1.230865} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.964036] env[61978]: INFO nova.virt.vmwareapi.vmops [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Created linked-clone VM from snapshot [ 1182.964629] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1343c226-4ff0-4fdb-9863-bd861211b17e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.973297] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Uploading image aade9a2f-a0b8-4a86-bf1c-3631eaadc81e {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1182.989567] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Destroying the VM {{(pid=61978) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1182.989947] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-fc2207d0-1d1b-4c32-b798-07ad2131cba6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.000802] env[61978]: DEBUG oslo_vmware.api [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1183.000802] env[61978]: value = "task-1395584" [ 1183.000802] env[61978]: _type = "Task" [ 1183.000802] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.013018] env[61978]: DEBUG oslo_vmware.api [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395584, 'name': Destroy_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.037216] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395582, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.083477] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52308b9c-60bc-012a-d715-f4608c6daa45, 'name': SearchDatastore_Task, 'duration_secs': 0.076666} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.084326] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe14b478-39fa-4fa8-9ca7-830fafa24e95 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.092049] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1183.092049] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5234ab76-bb2a-05b5-a5be-7c1b7ef97c40" [ 1183.092049] env[61978]: _type = "Task" [ 1183.092049] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.106389] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5234ab76-bb2a-05b5-a5be-7c1b7ef97c40, 'name': SearchDatastore_Task, 'duration_secs': 0.010428} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.106795] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1183.107187] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] ae6b92bb-6f79-4b52-bdb7-095985bf2fad/ae6b92bb-6f79-4b52-bdb7-095985bf2fad.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1183.107550] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4232975d-6520-4b54-b2bc-094f9d62071c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.118062] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1183.118062] env[61978]: value = "task-1395585" [ 1183.118062] env[61978]: _type = "Task" [ 1183.118062] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.129112] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395585, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.279031] env[61978]: DEBUG nova.scheduler.client.report [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1183.294840] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395580, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530501} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.295814] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9/758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1183.295814] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1183.295814] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce0ae3e9-5253-4127-80e4-1cab87327e2f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.309803] env[61978]: DEBUG nova.compute.manager [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1183.315794] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1183.315794] env[61978]: value = "task-1395586" [ 1183.315794] env[61978]: _type = "Task" [ 1183.315794] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.327997] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395586, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.344545] env[61978]: DEBUG nova.virt.hardware [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1183.345058] env[61978]: DEBUG nova.virt.hardware [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1183.345058] env[61978]: DEBUG nova.virt.hardware [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1183.345353] env[61978]: DEBUG nova.virt.hardware [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1183.345423] env[61978]: DEBUG nova.virt.hardware [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1183.346844] env[61978]: DEBUG nova.virt.hardware [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1183.346844] env[61978]: DEBUG nova.virt.hardware [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1183.346844] env[61978]: DEBUG nova.virt.hardware [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1183.346844] env[61978]: DEBUG nova.virt.hardware [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1183.346844] env[61978]: DEBUG nova.virt.hardware [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1183.346844] env[61978]: DEBUG nova.virt.hardware [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1183.348125] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6416ff60-21be-4202-a4a9-2df6d3791977 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.361382] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a1aeb30-e28a-4fec-8ef4-fb906a8eda59 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.451542] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': task-1395583, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.466906] env[61978]: DEBUG nova.network.neutron [req-2ae329a7-721e-4011-9196-c15f8dd31aa5 req-729e81aa-c160-4ced-88e6-51f37a31853b service nova] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Updated VIF entry in instance network info cache for port 52cdfaa1-00dd-4eed-94aa-d186999d0614. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1183.467437] env[61978]: DEBUG nova.network.neutron [req-2ae329a7-721e-4011-9196-c15f8dd31aa5 req-729e81aa-c160-4ced-88e6-51f37a31853b service nova] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Updating instance_info_cache with network_info: [{"id": "52cdfaa1-00dd-4eed-94aa-d186999d0614", "address": "fa:16:3e:db:2a:96", "network": {"id": "61175c4b-0b5d-4186-97ac-7a615b95ef28", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-341600036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d4d29d9b6a74b4887684c7b310280b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52cdfaa1-00", "ovs_interfaceid": "52cdfaa1-00dd-4eed-94aa-d186999d0614", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1183.514023] env[61978]: DEBUG oslo_vmware.api [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395584, 'name': Destroy_Task} progress is 33%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.549487] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395582, 'name': CreateVM_Task, 'duration_secs': 0.551632} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.549691] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1183.550494] env[61978]: DEBUG oslo_concurrency.lockutils [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1183.550716] env[61978]: DEBUG oslo_concurrency.lockutils [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.551142] env[61978]: DEBUG oslo_concurrency.lockutils [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1183.551442] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52d4f0f2-976a-4fa7-9786-7972ed714a5a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.558391] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1183.558391] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d2968a-6182-0282-33d4-8d1b31f2b043" [ 1183.558391] env[61978]: _type = "Task" [ 1183.558391] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.568314] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d2968a-6182-0282-33d4-8d1b31f2b043, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.628254] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395585, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460504} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.628623] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] ae6b92bb-6f79-4b52-bdb7-095985bf2fad/ae6b92bb-6f79-4b52-bdb7-095985bf2fad.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1183.628854] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1183.629149] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-08fbf2d4-84f0-459b-8f62-2491c1f6de8b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.636723] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1183.636723] env[61978]: value = "task-1395587" [ 1183.636723] env[61978]: _type = "Task" [ 1183.636723] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.646554] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395587, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.656172] env[61978]: DEBUG nova.compute.manager [req-5380a6b3-c4a7-4953-bcde-12eb228ca0d2 req-a1d0ce63-ffa3-4b16-a16a-681541927029 service nova] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Received event network-vif-plugged-73fe675b-ef9e-44db-a9d2-13d68f04aacb {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1183.656506] env[61978]: DEBUG oslo_concurrency.lockutils [req-5380a6b3-c4a7-4953-bcde-12eb228ca0d2 req-a1d0ce63-ffa3-4b16-a16a-681541927029 service nova] Acquiring lock "68791dff-12e0-499d-8835-1e9173af570f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1183.656881] env[61978]: DEBUG oslo_concurrency.lockutils [req-5380a6b3-c4a7-4953-bcde-12eb228ca0d2 req-a1d0ce63-ffa3-4b16-a16a-681541927029 service nova] Lock "68791dff-12e0-499d-8835-1e9173af570f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1183.657228] env[61978]: DEBUG oslo_concurrency.lockutils [req-5380a6b3-c4a7-4953-bcde-12eb228ca0d2 req-a1d0ce63-ffa3-4b16-a16a-681541927029 service nova] Lock "68791dff-12e0-499d-8835-1e9173af570f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1183.657415] env[61978]: DEBUG nova.compute.manager [req-5380a6b3-c4a7-4953-bcde-12eb228ca0d2 req-a1d0ce63-ffa3-4b16-a16a-681541927029 service nova] [instance: 68791dff-12e0-499d-8835-1e9173af570f] No waiting events found dispatching network-vif-plugged-73fe675b-ef9e-44db-a9d2-13d68f04aacb {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1183.657621] env[61978]: WARNING nova.compute.manager [req-5380a6b3-c4a7-4953-bcde-12eb228ca0d2 req-a1d0ce63-ffa3-4b16-a16a-681541927029 service nova] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Received unexpected event network-vif-plugged-73fe675b-ef9e-44db-a9d2-13d68f04aacb for instance with vm_state building and task_state spawning. 
[ 1183.716794] env[61978]: DEBUG oslo_concurrency.lockutils [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1183.717149] env[61978]: DEBUG oslo_concurrency.lockutils [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1183.717406] env[61978]: INFO nova.compute.manager [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Attaching volume 96d26999-1056-47ad-b42f-1ccc31fe5872 to /dev/sdb [ 1183.755070] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b684705-32ca-44b0-a44f-f1410bd742fd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.762886] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ffc95e-5329-47d5-9d58-8c06994545f7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.779198] env[61978]: DEBUG nova.virt.block_device [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Updating existing volume attachment record: bea153ad-58f6-4896-b08f-2aa017f051f8 {{(pid=61978) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1183.788565] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.515s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1183.788833] env[61978]: INFO nova.compute.manager [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Migrating [ 1183.795924] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.212s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1183.796255] env[61978]: DEBUG nova.objects.instance [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lazy-loading 'resources' on Instance uuid 81f0b79c-97b3-4a5d-a8fc-7c2250571177 
{{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1183.827072] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395586, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.282398} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.828027] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1183.828938] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-892efb8b-e9c4-41a1-9339-f416d5667f61 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.855761] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9/758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1183.856630] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a044383-7faa-4fe3-b5ea-42ee1f34ef6a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.883489] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1183.883489] env[61978]: value = "task-1395588" [ 1183.883489] env[61978]: _type = "Task" [ 1183.883489] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.892220] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395588, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.950457] env[61978]: DEBUG oslo_vmware.api [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': task-1395583, 'name': PowerOnVM_Task, 'duration_secs': 0.887488} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.950771] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1183.951017] env[61978]: INFO nova.compute.manager [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Took 8.97 seconds to spawn the instance on the hypervisor. [ 1183.951238] env[61978]: DEBUG nova.compute.manager [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1183.952159] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-161f5bd7-6060-45d5-90ac-21e8fb3084e8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.970581] env[61978]: DEBUG oslo_concurrency.lockutils [req-2ae329a7-721e-4011-9196-c15f8dd31aa5 req-729e81aa-c160-4ced-88e6-51f37a31853b service nova] Releasing lock "refresh_cache-c0be687a-7444-4019-8b12-dac41a7c080e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1184.014012] env[61978]: DEBUG oslo_vmware.api [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395584, 'name': Destroy_Task, 'duration_secs': 0.759053} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.014389] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Destroyed the VM [ 1184.014669] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Deleting Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1184.014968] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e17f1e10-6c39-4724-b0df-f33128540c9a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.023247] env[61978]: DEBUG oslo_vmware.api [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1184.023247] env[61978]: value = "task-1395590" [ 1184.023247] env[61978]: _type = "Task" [ 1184.023247] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.034835] env[61978]: DEBUG oslo_vmware.api [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395590, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.070336] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d2968a-6182-0282-33d4-8d1b31f2b043, 'name': SearchDatastore_Task, 'duration_secs': 0.010243} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.070632] env[61978]: DEBUG oslo_concurrency.lockutils [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1184.070924] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1184.071261] env[61978]: DEBUG oslo_concurrency.lockutils [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1184.071432] env[61978]: DEBUG oslo_concurrency.lockutils [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.071624] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1184.071907] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e3034140-8cc9-44a3-a90f-43566141e47f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.082756] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] 
Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1184.082999] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1184.083807] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08f30ed8-2613-4b69-b169-4dfe84d4feaf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.090643] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1184.090643] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f11c9b-bd75-7cc7-8a62-ce4dd31f9940" [ 1184.090643] env[61978]: _type = "Task" [ 1184.090643] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.100349] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f11c9b-bd75-7cc7-8a62-ce4dd31f9940, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.148990] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395587, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081129} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.149278] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1184.150082] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff05ef9f-0214-408e-88e3-a66efd682653 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.173146] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] ae6b92bb-6f79-4b52-bdb7-095985bf2fad/ae6b92bb-6f79-4b52-bdb7-095985bf2fad.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1184.173515] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57cd27f2-d3cc-4014-ac94-de96acfddb2a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.197630] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1184.197630] env[61978]: value = "task-1395591" [ 1184.197630] env[61978]: _type = "Task" [ 1184.197630] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.207630] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395591, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.235835] env[61978]: DEBUG nova.network.neutron [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Successfully updated port: 73fe675b-ef9e-44db-a9d2-13d68f04aacb {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1184.267768] env[61978]: DEBUG nova.compute.manager [req-165e6604-b7a3-4fb8-a310-3b029ea8a2d4 req-b28fe071-2311-4ca5-8889-66bb0e5b77ea service nova] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Received event network-changed-73fe675b-ef9e-44db-a9d2-13d68f04aacb {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1184.268021] env[61978]: DEBUG nova.compute.manager [req-165e6604-b7a3-4fb8-a310-3b029ea8a2d4 req-b28fe071-2311-4ca5-8889-66bb0e5b77ea service nova] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Refreshing instance network info cache due to event network-changed-73fe675b-ef9e-44db-a9d2-13d68f04aacb. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1184.268274] env[61978]: DEBUG oslo_concurrency.lockutils [req-165e6604-b7a3-4fb8-a310-3b029ea8a2d4 req-b28fe071-2311-4ca5-8889-66bb0e5b77ea service nova] Acquiring lock "refresh_cache-68791dff-12e0-499d-8835-1e9173af570f" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1184.268461] env[61978]: DEBUG oslo_concurrency.lockutils [req-165e6604-b7a3-4fb8-a310-3b029ea8a2d4 req-b28fe071-2311-4ca5-8889-66bb0e5b77ea service nova] Acquired lock "refresh_cache-68791dff-12e0-499d-8835-1e9173af570f" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.268646] env[61978]: DEBUG nova.network.neutron [req-165e6604-b7a3-4fb8-a310-3b029ea8a2d4 req-b28fe071-2311-4ca5-8889-66bb0e5b77ea service nova] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Refreshing network info cache for port 73fe675b-ef9e-44db-a9d2-13d68f04aacb {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1184.310493] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "refresh_cache-b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1184.310746] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "refresh_cache-b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.310999] env[61978]: DEBUG nova.network.neutron [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1184.398205] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395588, 'name': ReconfigVM_Task, 'duration_secs': 0.363243} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.399133] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Reconfigured VM instance instance-00000054 to attach disk [datastore2] 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9/758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1184.399483] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-97ffc694-fda5-4272-9640-b0b2b861ca0d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.408601] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1184.408601] env[61978]: value = "task-1395592" [ 1184.408601] env[61978]: _type = "Task" [ 1184.408601] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.422740] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395592, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.477985] env[61978]: INFO nova.compute.manager [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Took 15.04 seconds to build instance. [ 1184.550876] env[61978]: DEBUG oslo_vmware.api [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395590, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.611420] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f11c9b-bd75-7cc7-8a62-ce4dd31f9940, 'name': SearchDatastore_Task, 'duration_secs': 0.012652} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.619040] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0455498-90dc-455f-a663-5b9cb1714d52 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.628316] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1184.628316] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5276e0e1-a6c7-98d8-26aa-6fd9c6f76ce9" [ 1184.628316] env[61978]: _type = "Task" [ 1184.628316] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.633375] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e91535-e716-4ad3-a3d9-e8444d073fbc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.643492] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5276e0e1-a6c7-98d8-26aa-6fd9c6f76ce9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.646719] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddae4161-9a8b-47a0-a46d-6ac306c2d7e1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.683938] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf6ae57-8169-4f0e-b4ea-3817e6e0e307 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.694583] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00103097-5b60-4a4d-90da-193e0000cc34 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.710167] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395591, 'name': ReconfigVM_Task, 'duration_secs': 0.474379} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.719992] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Reconfigured VM instance instance-00000046 to attach disk [datastore2] ae6b92bb-6f79-4b52-bdb7-095985bf2fad/ae6b92bb-6f79-4b52-bdb7-095985bf2fad.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1184.724289] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_name': '/dev/sda', 'encryption_format': None, 'encrypted': False, 'device_type': 'disk', 'disk_bus': None, 'size': 0, 'encryption_options': None, 'encryption_secret_uuid': None, 'guest_format': None, 'boot_index': 0, 'image_id': '4732143d-796a-4a66-9f1e-806f8b0654e0'}], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sdb', 'disk_bus': None, 'device_type': None, 'attachment_id': 'f7498c52-cf52-470e-b4d9-9996d6ec6c99', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295980', 'volume_id': '861ed39b-543d-436d-a50d-364cfadf8e50', 'name': 'volume-861ed39b-543d-436d-a50d-364cfadf8e50', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ae6b92bb-6f79-4b52-bdb7-095985bf2fad', 'attached_at': '', 'detached_at': '', 'volume_id': '861ed39b-543d-436d-a50d-364cfadf8e50', 'serial': '861ed39b-543d-436d-a50d-364cfadf8e50'}, 'boot_index': None, 'guest_format': None, 'delete_on_termination': False, 'volume_type': None}], 'swap': None} {{(pid=61978) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1184.724289] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Volume attach. 
Driver type: vmdk {{(pid=61978) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1184.724289] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295980', 'volume_id': '861ed39b-543d-436d-a50d-364cfadf8e50', 'name': 'volume-861ed39b-543d-436d-a50d-364cfadf8e50', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ae6b92bb-6f79-4b52-bdb7-095985bf2fad', 'attached_at': '', 'detached_at': '', 'volume_id': '861ed39b-543d-436d-a50d-364cfadf8e50', 'serial': '861ed39b-543d-436d-a50d-364cfadf8e50'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1184.724289] env[61978]: DEBUG nova.compute.provider_tree [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1184.724289] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67d34b6-a1a4-4467-9942-cc92d9d17149 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.742617] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "refresh_cache-68791dff-12e0-499d-8835-1e9173af570f" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1184.744425] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea103bad-f0af-492e-b558-9fb67cc63103 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.772949] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] volume-861ed39b-543d-436d-a50d-364cfadf8e50/volume-861ed39b-543d-436d-a50d-364cfadf8e50.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1184.775702] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d84453a-80ce-4f25-a8b1-9c95d30296dc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.796746] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1184.796746] env[61978]: value = "task-1395593" [ 1184.796746] env[61978]: _type = "Task" [ 1184.796746] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.807085] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395593, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.819863] env[61978]: DEBUG nova.network.neutron [req-165e6604-b7a3-4fb8-a310-3b029ea8a2d4 req-b28fe071-2311-4ca5-8889-66bb0e5b77ea service nova] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1184.922228] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395592, 'name': Rename_Task, 'duration_secs': 0.282021} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.922469] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1184.922742] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-878a6919-8fe6-4ac9-a764-708c5ba4d0ef {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.930993] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1184.930993] env[61978]: value = "task-1395594" [ 1184.930993] env[61978]: _type = "Task" [ 1184.930993] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.944120] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395594, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.979857] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f115f8a6-9cac-4c61-bf95-06c3dca64baa tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Lock "5d1d19d8-241b-41b8-b1c0-caf54f8fd600" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.567s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1184.986345] env[61978]: DEBUG nova.network.neutron [req-165e6604-b7a3-4fb8-a310-3b029ea8a2d4 req-b28fe071-2311-4ca5-8889-66bb0e5b77ea service nova] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1185.037659] env[61978]: DEBUG oslo_vmware.api [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395590, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.040745] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Acquiring lock "5d1d19d8-241b-41b8-b1c0-caf54f8fd600" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.041110] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Lock "5d1d19d8-241b-41b8-b1c0-caf54f8fd600" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.041562] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Acquiring lock "5d1d19d8-241b-41b8-b1c0-caf54f8fd600-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.041562] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Lock "5d1d19d8-241b-41b8-b1c0-caf54f8fd600-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.041720] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Lock "5d1d19d8-241b-41b8-b1c0-caf54f8fd600-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.043855] env[61978]: INFO nova.compute.manager [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Terminating instance [ 1185.045911] env[61978]: DEBUG nova.compute.manager [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1185.046349] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1185.047279] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a396504d-165f-42df-a485-c691a68a77d9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.055920] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1185.056249] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b55e3684-1247-4e6d-8b50-94b125bb71dc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.064063] env[61978]: DEBUG oslo_vmware.api [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Waiting for the task: (returnval){ [ 1185.064063] env[61978]: value = "task-1395595" [ 1185.064063] env[61978]: _type = "Task" [ 1185.064063] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.075277] env[61978]: DEBUG oslo_vmware.api [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': task-1395595, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.143263] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5276e0e1-a6c7-98d8-26aa-6fd9c6f76ce9, 'name': SearchDatastore_Task, 'duration_secs': 0.021509} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.143263] env[61978]: DEBUG oslo_concurrency.lockutils [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1185.143263] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] c0be687a-7444-4019-8b12-dac41a7c080e/c0be687a-7444-4019-8b12-dac41a7c080e.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1185.143263] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9190b66-fd37-4429-9c28-d728bab94795 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.152352] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1185.152352] env[61978]: value = "task-1395596" [ 1185.152352] env[61978]: _type = "Task" [ 1185.152352] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.153448] env[61978]: DEBUG nova.network.neutron [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Updating instance_info_cache with network_info: [{"id": "2efcc135-18f4-45d3-9408-817cdbada770", "address": "fa:16:3e:8c:cc:2c", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2efcc135-18", "ovs_interfaceid": "2efcc135-18f4-45d3-9408-817cdbada770", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1185.165240] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f 
tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395596, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.229741] env[61978]: DEBUG nova.scheduler.client.report [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1185.310654] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395593, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.442836] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395594, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.489474] env[61978]: DEBUG oslo_concurrency.lockutils [req-165e6604-b7a3-4fb8-a310-3b029ea8a2d4 req-b28fe071-2311-4ca5-8889-66bb0e5b77ea service nova] Releasing lock "refresh_cache-68791dff-12e0-499d-8835-1e9173af570f" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1185.490120] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "refresh_cache-68791dff-12e0-499d-8835-1e9173af570f" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.490282] env[61978]: DEBUG nova.network.neutron [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1185.549367] env[61978]: DEBUG oslo_vmware.api [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395590, 'name': RemoveSnapshot_Task, 'duration_secs': 1.31096} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.549749] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Deleted Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1185.578987] env[61978]: DEBUG oslo_vmware.api [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': task-1395595, 'name': PowerOffVM_Task, 'duration_secs': 0.233243} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.579479] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1185.579795] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1185.580279] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c34e5337-91d3-4400-bb19-092c3ffee1f3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.660059] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "refresh_cache-b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1185.668406] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395596, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.738544] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.942s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.742869] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.587s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.744725] env[61978]: INFO nova.compute.claims [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1185.748757] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1185.748951] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1185.749165] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Deleting the datastore file [datastore2] 5d1d19d8-241b-41b8-b1c0-caf54f8fd600 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1185.750150] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bfcae666-15dd-4955-8a9d-ca1df55c73b1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.759898] env[61978]: DEBUG oslo_vmware.api [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Waiting for the task: (returnval){ [ 1185.759898] env[61978]: value = "task-1395598" [ 1185.759898] env[61978]: _type = "Task" [ 1185.759898] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.766792] env[61978]: INFO nova.scheduler.client.report [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Deleted allocations for instance 81f0b79c-97b3-4a5d-a8fc-7c2250571177 [ 1185.777213] env[61978]: DEBUG oslo_vmware.api [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': task-1395598, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.814116] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395593, 'name': ReconfigVM_Task, 'duration_secs': 0.600893} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.814116] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Reconfigured VM instance instance-00000046 to attach disk [datastore2] volume-861ed39b-543d-436d-a50d-364cfadf8e50/volume-861ed39b-543d-436d-a50d-364cfadf8e50.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1185.820555] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d678d03-686f-4317-9ee2-5a8b64f0bab4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.839599] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1185.839599] env[61978]: value = "task-1395599" [ 1185.839599] env[61978]: _type = "Task" [ 1185.839599] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.853013] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395599, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.948277] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395594, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.048561] env[61978]: DEBUG nova.network.neutron [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1186.054709] env[61978]: WARNING nova.compute.manager [None req-ae08fb83-ec92-41e8-907a-afbd8b5aa0e9 tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Image not found during snapshot: nova.exception.ImageNotFound: Image aade9a2f-a0b8-4a86-bf1c-3631eaadc81e could not be found. [ 1186.169675] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395596, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.75194} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.170211] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] c0be687a-7444-4019-8b12-dac41a7c080e/c0be687a-7444-4019-8b12-dac41a7c080e.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1186.170447] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1186.170730] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1f92c0bb-21f0-4822-8e04-8368779aef76 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.178816] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1186.178816] env[61978]: value = "task-1395601" [ 1186.178816] env[61978]: _type = "Task" [ 1186.178816] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.188882] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395601, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.235637] env[61978]: DEBUG nova.network.neutron [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Updating instance_info_cache with network_info: [{"id": "73fe675b-ef9e-44db-a9d2-13d68f04aacb", "address": "fa:16:3e:a1:d0:9a", "network": {"id": "69a878ea-ec7d-4793-9c48-f0f5d454a6fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1711420523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d95ebcafdca43b8a1636e21c7258803", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73fe675b-ef", "ovs_interfaceid": "73fe675b-ef9e-44db-a9d2-13d68f04aacb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.278946] env[61978]: DEBUG oslo_vmware.api [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Task: {'id': task-1395598, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.356897} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.279276] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d15aa3c7-78fc-42bf-8637-ccdcb80c98b5 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "81f0b79c-97b3-4a5d-a8fc-7c2250571177" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.185s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1186.280554] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1186.280882] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1186.281234] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1186.281539] env[61978]: INFO nova.compute.manager [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1186.281944] env[61978]: DEBUG oslo.service.loopingcall [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1186.282848] env[61978]: DEBUG nova.compute.manager [-] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1186.283078] env[61978]: DEBUG nova.network.neutron [-] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1186.350809] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395599, 'name': ReconfigVM_Task, 'duration_secs': 0.265907} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.353651] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295980', 'volume_id': '861ed39b-543d-436d-a50d-364cfadf8e50', 'name': 'volume-861ed39b-543d-436d-a50d-364cfadf8e50', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ae6b92bb-6f79-4b52-bdb7-095985bf2fad', 'attached_at': '', 'detached_at': '', 'volume_id': '861ed39b-543d-436d-a50d-364cfadf8e50', 'serial': '861ed39b-543d-436d-a50d-364cfadf8e50'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1186.354505] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2775355f-4f9a-4dc5-a60f-97b2118c35f8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.364110] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1186.364110] env[61978]: value = "task-1395602" [ 1186.364110] env[61978]: _type = "Task" [ 1186.364110] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.375160] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395602, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.449842] env[61978]: DEBUG oslo_vmware.api [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395594, 'name': PowerOnVM_Task, 'duration_secs': 1.064876} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.450311] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1186.450419] env[61978]: INFO nova.compute.manager [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Took 9.11 seconds to spawn the instance on the hypervisor. 
[ 1186.450686] env[61978]: DEBUG nova.compute.manager [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1186.451558] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dde70d7-a0fe-4e93-a35c-eafb7be64b50 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.540780] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "612aba6c-a30d-4eeb-8f85-e791bda55582" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1186.541082] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "612aba6c-a30d-4eeb-8f85-e791bda55582" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1186.541316] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "612aba6c-a30d-4eeb-8f85-e791bda55582-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1186.541770] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "612aba6c-a30d-4eeb-8f85-e791bda55582-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1186.541770] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "612aba6c-a30d-4eeb-8f85-e791bda55582-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1186.544655] env[61978]: INFO nova.compute.manager [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Terminating instance [ 1186.546667] env[61978]: DEBUG nova.compute.manager [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1186.546933] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1186.547833] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c8367f0-d641-4674-95b6-40a2466342cc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.556422] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1186.559451] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba1883bf-1a90-4fda-bf2f-c47d42ce4887 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.568254] env[61978]: DEBUG oslo_vmware.api [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1186.568254] env[61978]: value = "task-1395603" [ 1186.568254] env[61978]: _type = "Task" [ 1186.568254] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.576492] env[61978]: DEBUG oslo_vmware.api [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395603, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.580344] env[61978]: DEBUG nova.compute.manager [req-009a5129-3f14-466e-bf20-95e3836f7006 req-c7a84b7d-9673-44d0-8e3f-b77d0f4613b2 service nova] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Received event network-vif-deleted-52e402de-991f-4bee-bc93-9d0ac255d4b7 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1186.580559] env[61978]: INFO nova.compute.manager [req-009a5129-3f14-466e-bf20-95e3836f7006 req-c7a84b7d-9673-44d0-8e3f-b77d0f4613b2 service nova] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Neutron deleted interface 52e402de-991f-4bee-bc93-9d0ac255d4b7; detaching it from the instance and deleting it from the info cache [ 1186.580808] env[61978]: DEBUG nova.network.neutron [req-009a5129-3f14-466e-bf20-95e3836f7006 req-c7a84b7d-9673-44d0-8e3f-b77d0f4613b2 service nova] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.689846] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395601, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.116374} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.690189] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1186.691017] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae01deb-ca87-4a4e-bc38-04218a8595d5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.713138] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] c0be687a-7444-4019-8b12-dac41a7c080e/c0be687a-7444-4019-8b12-dac41a7c080e.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1186.713788] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19014bcf-ae43-44ec-ab12-0c7342fb6304 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.734027] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1186.734027] env[61978]: value = "task-1395604" [ 1186.734027] env[61978]: _type = "Task" [ 1186.734027] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.738971] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "refresh_cache-68791dff-12e0-499d-8835-1e9173af570f" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1186.739302] env[61978]: DEBUG nova.compute.manager [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Instance network_info: |[{"id": "73fe675b-ef9e-44db-a9d2-13d68f04aacb", "address": "fa:16:3e:a1:d0:9a", "network": {"id": "69a878ea-ec7d-4793-9c48-f0f5d454a6fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1711420523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d95ebcafdca43b8a1636e21c7258803", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73fe675b-ef", "ovs_interfaceid": "73fe675b-ef9e-44db-a9d2-13d68f04aacb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1186.742911] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:d0:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6def6dc5-d564-45ca-8f4f-7c820677e6e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '73fe675b-ef9e-44db-a9d2-13d68f04aacb', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1186.751057] env[61978]: DEBUG oslo.service.loopingcall [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1186.751057] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395604, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.751057] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1186.751057] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6b19f41e-f8b6-450e-a3e7-1a4d4f7782d0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.773615] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1186.773615] env[61978]: value = "task-1395605" [ 1186.773615] env[61978]: _type = "Task" [ 1186.773615] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.781380] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395605, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.874426] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395602, 'name': Rename_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.921779] env[61978]: DEBUG oslo_concurrency.lockutils [None req-131c9254-04ad-4f4d-b977-2f3fe97341d0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "9ee04ee8-98ec-4be9-935d-cad7cd176466" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1186.922133] env[61978]: DEBUG oslo_concurrency.lockutils [None req-131c9254-04ad-4f4d-b977-2f3fe97341d0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "9ee04ee8-98ec-4be9-935d-cad7cd176466" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1186.976127] env[61978]: INFO nova.compute.manager [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Took 14.58 seconds to build instance. 
[ 1187.024243] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1060bc-d017-4f6d-b06b-9fb25a9c2eeb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.032210] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe49872-aefa-48fa-a460-7e1b2e8215f2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.063118] env[61978]: DEBUG nova.network.neutron [-] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.065192] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d236938-c2d3-4c27-8d11-1540dd571073 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.080258] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd75dced-1861-45a1-8b60-37f913bd59e6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.084614] env[61978]: DEBUG oslo_vmware.api [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395603, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.085581] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0d3b4e89-8bac-432b-9df5-771a0826c848 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.098038] env[61978]: DEBUG nova.compute.provider_tree [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1187.103375] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b50890-de9d-4c5b-a259-75078c417cb8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.140245] env[61978]: DEBUG nova.compute.manager [req-009a5129-3f14-466e-bf20-95e3836f7006 req-c7a84b7d-9673-44d0-8e3f-b77d0f4613b2 service nova] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Detach interface failed, port_id=52e402de-991f-4bee-bc93-9d0ac255d4b7, reason: Instance 5d1d19d8-241b-41b8-b1c0-caf54f8fd600 could not be found. 
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1187.178538] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a514b5ae-daeb-4c0b-b3d1-ae9e2a51878e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.198776] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Updating instance 'b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2' progress to 0 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1187.244520] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395604, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.284429] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395605, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.373807] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395602, 'name': Rename_Task, 'duration_secs': 0.926593} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.374172] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1187.374481] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4593a9b9-f942-4813-88d6-231ba92bd2c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.383020] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1187.383020] env[61978]: value = "task-1395606" [ 1187.383020] env[61978]: _type = "Task" [ 1187.383020] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.400028] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395606, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.428020] env[61978]: INFO nova.compute.manager [None req-131c9254-04ad-4f4d-b977-2f3fe97341d0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Detaching volume df5c8cba-6cf8-4d47-9b7b-37971eba01d7 [ 1187.468479] env[61978]: INFO nova.virt.block_device [None req-131c9254-04ad-4f4d-b977-2f3fe97341d0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Attempting to driver detach volume df5c8cba-6cf8-4d47-9b7b-37971eba01d7 from mountpoint /dev/sdb [ 1187.468479] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-131c9254-04ad-4f4d-b977-2f3fe97341d0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Volume detach. Driver type: vmdk {{(pid=61978) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1187.468479] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-131c9254-04ad-4f4d-b977-2f3fe97341d0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295958', 'volume_id': 'df5c8cba-6cf8-4d47-9b7b-37971eba01d7', 'name': 'volume-df5c8cba-6cf8-4d47-9b7b-37971eba01d7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9ee04ee8-98ec-4be9-935d-cad7cd176466', 'attached_at': '', 'detached_at': '', 'volume_id': 'df5c8cba-6cf8-4d47-9b7b-37971eba01d7', 'serial': 'df5c8cba-6cf8-4d47-9b7b-37971eba01d7'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1187.468479] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d64af714-ee9d-4204-b8db-3db608ab0db9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.501170] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c4c1cbfe-73ac-434e-8ef2-ccf9caf2a119 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.121s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1187.502727] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd713ac5-e8c9-4679-a1dc-571dadb13a5a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.512820] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-522255ca-6671-491b-bab1-80d6efcdf91c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.538037] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d47f81e-b97b-4ac7-875f-ac0c11f3fbdc {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.556235] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-131c9254-04ad-4f4d-b977-2f3fe97341d0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] The volume has not been displaced from its original location: [datastore2] volume-df5c8cba-6cf8-4d47-9b7b-37971eba01d7/volume-df5c8cba-6cf8-4d47-9b7b-37971eba01d7.vmdk. No consolidation needed. {{(pid=61978) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1187.562684] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-131c9254-04ad-4f4d-b977-2f3fe97341d0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Reconfiguring VM instance instance-0000003a to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1187.563122] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6cb0206-41be-44b4-8da3-3e5425370c4a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.580781] env[61978]: INFO nova.compute.manager [-] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Took 1.30 seconds to deallocate network for instance. [ 1187.592899] env[61978]: DEBUG oslo_vmware.api [None req-131c9254-04ad-4f4d-b977-2f3fe97341d0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1187.592899] env[61978]: value = "task-1395607" [ 1187.592899] env[61978]: _type = "Task" [ 1187.592899] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.600265] env[61978]: DEBUG oslo_vmware.api [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395603, 'name': PowerOffVM_Task, 'duration_secs': 0.989632} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.601197] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1187.601471] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1187.602478] env[61978]: DEBUG nova.scheduler.client.report [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1187.606153] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-807c9eb8-7f4e-480c-9a57-caa670e474f4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.617285] env[61978]: DEBUG oslo_vmware.api [None req-131c9254-04ad-4f4d-b977-2f3fe97341d0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395607, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.686332] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1187.686847] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1187.686847] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Deleting the datastore file [datastore2] 612aba6c-a30d-4eeb-8f85-e791bda55582 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1187.687165] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c42a83de-dee6-4e26-aeed-940c33f2bdf3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.694025] env[61978]: DEBUG oslo_vmware.api [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for the task: (returnval){ [ 1187.694025] env[61978]: value = "task-1395609" [ 1187.694025] env[61978]: _type = "Task" [ 1187.694025] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.704856] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1187.705486] env[61978]: DEBUG oslo_vmware.api [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395609, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.705587] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cde45028-459c-44f7-88ad-7370c931e237 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.711800] env[61978]: DEBUG oslo_vmware.api [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1187.711800] env[61978]: value = "task-1395610" [ 1187.711800] env[61978]: _type = "Task" [ 1187.711800] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.720557] env[61978]: DEBUG oslo_vmware.api [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395610, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.744648] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395604, 'name': ReconfigVM_Task, 'duration_secs': 0.824959} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.744949] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Reconfigured VM instance instance-00000055 to attach disk [datastore2] c0be687a-7444-4019-8b12-dac41a7c080e/c0be687a-7444-4019-8b12-dac41a7c080e.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1187.745663] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd570fd4-d814-40f9-8bc3-0f795a43636a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.751948] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1187.751948] env[61978]: value = "task-1395611" [ 1187.751948] env[61978]: _type = "Task" [ 1187.751948] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.760813] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395611, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.786163] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395605, 'name': CreateVM_Task, 'duration_secs': 0.940418} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.786406] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1187.787233] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1187.787453] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.787850] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1187.788206] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21665b80-f455-4f7d-90b1-05e861cee696 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.793868] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1187.793868] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52920e6d-095c-5065-fae6-9c1274643c1e" [ 1187.793868] env[61978]: _type = "Task" [ 1187.793868] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.802733] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52920e6d-095c-5065-fae6-9c1274643c1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.900687] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395606, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.095399] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1188.105117] env[61978]: DEBUG oslo_vmware.api [None req-131c9254-04ad-4f4d-b977-2f3fe97341d0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395607, 'name': ReconfigVM_Task, 'duration_secs': 0.299007} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.105528] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-131c9254-04ad-4f4d-b977-2f3fe97341d0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Reconfigured VM instance instance-0000003a to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1188.110670] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c554e19-178b-43ca-b5ac-edffec22fc92 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.122034] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.380s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.122560] env[61978]: DEBUG nova.compute.manager [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1188.125252] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.030s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1188.125488] env[61978]: DEBUG nova.objects.instance [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Lazy-loading 'resources' on Instance uuid 5d1d19d8-241b-41b8-b1c0-caf54f8fd600 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1188.132732] env[61978]: DEBUG oslo_vmware.api [None req-131c9254-04ad-4f4d-b977-2f3fe97341d0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1188.132732] env[61978]: value = "task-1395612" [ 1188.132732] env[61978]: _type = "Task" [ 1188.132732] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.146341] env[61978]: DEBUG oslo_vmware.api [None req-131c9254-04ad-4f4d-b977-2f3fe97341d0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395612, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.203373] env[61978]: DEBUG oslo_vmware.api [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Task: {'id': task-1395609, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.251682} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.204061] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1188.204338] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1188.204567] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1188.205330] env[61978]: INFO nova.compute.manager [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Took 1.66 seconds to destroy the instance on the hypervisor. 
[ 1188.205330] env[61978]: DEBUG oslo.service.loopingcall [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1188.205330] env[61978]: DEBUG nova.compute.manager [-] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1188.205330] env[61978]: DEBUG nova.network.neutron [-] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1188.221952] env[61978]: DEBUG oslo_vmware.api [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395610, 'name': PowerOffVM_Task, 'duration_secs': 0.247252} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.222269] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1188.222451] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Updating instance 'b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2' progress to 17 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1188.264333] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395611, 'name': Rename_Task, 'duration_secs': 0.204383} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.264706] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1188.265055] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e6eaa47-a827-4871-9d26-879e6d05f978 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.272985] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1188.272985] env[61978]: value = "task-1395613" [ 1188.272985] env[61978]: _type = "Task" [ 1188.272985] env[61978]: } to complete.
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.280994] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395613, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.304690] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52920e6d-095c-5065-fae6-9c1274643c1e, 'name': SearchDatastore_Task, 'duration_secs': 0.013926} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.304970] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1188.305235] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1188.305492] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1188.305677] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.305878] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1188.306259] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d5e9c9e-1a87-4fc4-a8f8-af27845aeee1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.317100] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Created directory with path [datastore2] devstack-image-cache_base 
{{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1188.317323] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1188.318216] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3070030-0d0c-4a98-b84b-63eb9c22e35f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.323706] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1188.323706] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a99c6a-8bc9-ba5a-2d8f-c54111d1516a" [ 1188.323706] env[61978]: _type = "Task" [ 1188.323706] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.332174] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a99c6a-8bc9-ba5a-2d8f-c54111d1516a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.333228] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Volume attach. 
Driver type: vmdk {{(pid=61978) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1188.333451] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295994', 'volume_id': '96d26999-1056-47ad-b42f-1ccc31fe5872', 'name': 'volume-96d26999-1056-47ad-b42f-1ccc31fe5872', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6', 'attached_at': '', 'detached_at': '', 'volume_id': '96d26999-1056-47ad-b42f-1ccc31fe5872', 'serial': '96d26999-1056-47ad-b42f-1ccc31fe5872'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1188.334253] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c25746fb-5eb9-4fcb-933a-f02e2ec4efbe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.351362] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca4a0f8-fd29-422b-8917-f69221bf53ef {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.377212] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] volume-96d26999-1056-47ad-b42f-1ccc31fe5872/volume-96d26999-1056-47ad-b42f-1ccc31fe5872.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1188.377578] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-300427dd-5203-4721-9cea-1f5738e076a8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.399936] env[61978]: DEBUG oslo_vmware.api [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395606, 'name': PowerOnVM_Task, 'duration_secs': 0.769247} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.401244] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1188.401467] env[61978]: DEBUG nova.compute.manager [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1188.401848] env[61978]: DEBUG oslo_vmware.api [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1188.401848] env[61978]: value = "task-1395614" [ 1188.401848] env[61978]: _type = "Task" [ 1188.401848] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.402560] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a605cbb9-3853-4107-88e5-2362eef06390 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.413370] env[61978]: DEBUG oslo_vmware.api [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395614, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.612553] env[61978]: DEBUG nova.compute.manager [req-3486dd77-df91-4a0f-a648-0be9bae92367 req-d91047b0-f1ca-4b0d-9c3d-cd7c96ebe9d6 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Received event network-changed-3c5e24a1-8ef7-45a5-a39a-4ce790adc338 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1188.612896] env[61978]: DEBUG nova.compute.manager [req-3486dd77-df91-4a0f-a648-0be9bae92367 req-d91047b0-f1ca-4b0d-9c3d-cd7c96ebe9d6 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Refreshing instance network info cache due to event network-changed-3c5e24a1-8ef7-45a5-a39a-4ce790adc338. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1188.613164] env[61978]: DEBUG oslo_concurrency.lockutils [req-3486dd77-df91-4a0f-a648-0be9bae92367 req-d91047b0-f1ca-4b0d-9c3d-cd7c96ebe9d6 service nova] Acquiring lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1188.613405] env[61978]: DEBUG oslo_concurrency.lockutils [req-3486dd77-df91-4a0f-a648-0be9bae92367 req-d91047b0-f1ca-4b0d-9c3d-cd7c96ebe9d6 service nova] Acquired lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.613687] env[61978]: DEBUG nova.network.neutron [req-3486dd77-df91-4a0f-a648-0be9bae92367 req-d91047b0-f1ca-4b0d-9c3d-cd7c96ebe9d6 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Refreshing network info cache for port 3c5e24a1-8ef7-45a5-a39a-4ce790adc338 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1188.629184] env[61978]: DEBUG nova.compute.utils [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1188.633855] env[61978]: DEBUG nova.compute.manager [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1188.634095] env[61978]: DEBUG nova.network.neutron [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1188.646991] env[61978]: DEBUG oslo_vmware.api [None req-131c9254-04ad-4f4d-b977-2f3fe97341d0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395612, 'name': ReconfigVM_Task, 'duration_secs': 0.189519} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.647916] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-131c9254-04ad-4f4d-b977-2f3fe97341d0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295958', 'volume_id': 'df5c8cba-6cf8-4d47-9b7b-37971eba01d7', 'name': 'volume-df5c8cba-6cf8-4d47-9b7b-37971eba01d7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9ee04ee8-98ec-4be9-935d-cad7cd176466', 'attached_at': '', 'detached_at': '', 'volume_id': 'df5c8cba-6cf8-4d47-9b7b-37971eba01d7', 'serial': 'df5c8cba-6cf8-4d47-9b7b-37971eba01d7'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1188.684268] env[61978]: DEBUG nova.policy [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a208cee3d9c4efb8240ad943b55e915', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86ad52b551104a2594f1dbbc287f9efa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1188.730512] env[61978]: DEBUG nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:05Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1188.732803] env[61978]: DEBUG nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1188.732803] env[61978]: DEBUG nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1188.732803] env[61978]: DEBUG nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1188.732803] env[61978]: DEBUG 
nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1188.732803] env[61978]: DEBUG nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1188.732803] env[61978]: DEBUG nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1188.732803] env[61978]: DEBUG nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1188.732803] env[61978]: DEBUG nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1188.732803] env[61978]: DEBUG nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1188.732803] env[61978]: DEBUG nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1188.739713] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2917913a-e7f4-4f04-8f59-0fefe93fe32d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.759740] env[61978]: DEBUG oslo_vmware.api [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1188.759740] env[61978]: value = "task-1395615" [ 1188.759740] env[61978]: _type = "Task" [ 1188.759740] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.772121] env[61978]: DEBUG oslo_vmware.api [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395615, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.784040] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395613, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.838878] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a99c6a-8bc9-ba5a-2d8f-c54111d1516a, 'name': SearchDatastore_Task, 'duration_secs': 0.010027} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.839770] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a99567c-1ed7-4729-9565-577d7142c9d6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.848157] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1188.848157] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]526500ed-4528-2854-495b-d7523e349bdf" [ 1188.848157] env[61978]: _type = "Task" [ 1188.848157] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.859979] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]526500ed-4528-2854-495b-d7523e349bdf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.924847] env[61978]: DEBUG oslo_vmware.api [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395614, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.932788] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1188.968314] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877fc0ec-4603-48ac-a40e-978ffcf77688 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.977835] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9197a67-e0a1-4def-bad1-a6bc5cfaf60b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.014079] env[61978]: DEBUG nova.network.neutron [-] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1189.016988] env[61978]: DEBUG nova.network.neutron [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Successfully created port: c97a7df3-acef-43d7-9e3b-2117f142a29d {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1189.019687] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c59c07d5-82cd-4159-a879-c2b0f9e95698 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.029708] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5a913b-94e1-4fab-8d31-3878d871ba63 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.044336] env[61978]: DEBUG nova.compute.provider_tree [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1189.134782] env[61978]: DEBUG nova.compute.manager [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1189.173556] env[61978]: DEBUG oslo_vmware.rw_handles [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5257e5b6-e864-72be-3481-4ceb7685e5a0/disk-0.vmdk. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1189.175059] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501e36e5-b676-4a63-b648-777a5eb0f417 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.181613] env[61978]: DEBUG oslo_vmware.rw_handles [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5257e5b6-e864-72be-3481-4ceb7685e5a0/disk-0.vmdk is in state: ready. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1189.181816] env[61978]: ERROR oslo_vmware.rw_handles [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5257e5b6-e864-72be-3481-4ceb7685e5a0/disk-0.vmdk due to incomplete transfer. [ 1189.182365] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4a1f4027-5adc-45e0-bc4c-ac0569cc15b9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.189941] env[61978]: DEBUG oslo_vmware.rw_handles [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5257e5b6-e864-72be-3481-4ceb7685e5a0/disk-0.vmdk. 
{{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1189.190211] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Uploaded image 443a8916-4f98-4cb9-9e27-49dd792e901d to the Glance image server {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1189.192362] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Destroying the VM {{(pid=61978) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1189.192890] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d786e9ee-23e4-416d-92ad-35781f4a3f10 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.195455] env[61978]: DEBUG nova.objects.instance [None req-131c9254-04ad-4f4d-b977-2f3fe97341d0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lazy-loading 'flavor' on Instance uuid 9ee04ee8-98ec-4be9-935d-cad7cd176466 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1189.200576] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1189.200576] env[61978]: value = "task-1395616" [ 1189.200576] env[61978]: _type = "Task" [ 1189.200576] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.213284] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395616, 'name': Destroy_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.269593] env[61978]: DEBUG oslo_vmware.api [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395615, 'name': ReconfigVM_Task, 'duration_secs': 0.207134} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.269930] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Updating instance 'b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2' progress to 33 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1189.282174] env[61978]: DEBUG oslo_vmware.api [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395613, 'name': PowerOnVM_Task, 'duration_secs': 0.68346} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.282420] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1189.282764] env[61978]: INFO nova.compute.manager [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Took 9.48 seconds to spawn the instance on the hypervisor. [ 1189.283089] env[61978]: DEBUG nova.compute.manager [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1189.284438] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21d8efd-448c-4ab7-876e-61db53da26a1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.359556] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]526500ed-4528-2854-495b-d7523e349bdf, 'name': SearchDatastore_Task, 'duration_secs': 0.012736} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.362509] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1189.363063] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 68791dff-12e0-499d-8835-1e9173af570f/68791dff-12e0-499d-8835-1e9173af570f.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1189.363759] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8472369d-f64e-4c53-a21e-97de7a116d7e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.370209] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1189.370209] env[61978]: value = "task-1395617" [ 1189.370209] env[61978]: _type = "Task" [ 1189.370209] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.378733] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395617, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.415187] env[61978]: DEBUG oslo_vmware.api [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395614, 'name': ReconfigVM_Task, 'duration_secs': 0.712261} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.415529] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Reconfigured VM instance instance-0000004b to attach disk [datastore1] volume-96d26999-1056-47ad-b42f-1ccc31fe5872/volume-96d26999-1056-47ad-b42f-1ccc31fe5872.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1189.420592] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eaffa18f-04b3-4d80-a503-9ecccc39606d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.441769] env[61978]: DEBUG oslo_vmware.api [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1189.441769] env[61978]: value = "task-1395618" [ 1189.441769] env[61978]: _type = "Task" [ 1189.441769] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.451769] env[61978]: DEBUG oslo_vmware.api [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395618, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.469915] env[61978]: DEBUG nova.network.neutron [req-3486dd77-df91-4a0f-a648-0be9bae92367 req-d91047b0-f1ca-4b0d-9c3d-cd7c96ebe9d6 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Updated VIF entry in instance network info cache for port 3c5e24a1-8ef7-45a5-a39a-4ce790adc338. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1189.470119] env[61978]: DEBUG nova.network.neutron [req-3486dd77-df91-4a0f-a648-0be9bae92367 req-d91047b0-f1ca-4b0d-9c3d-cd7c96ebe9d6 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Updating instance_info_cache with network_info: [{"id": "3c5e24a1-8ef7-45a5-a39a-4ce790adc338", "address": "fa:16:3e:14:c6:36", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c5e24a1-8e", "ovs_interfaceid": "3c5e24a1-8ef7-45a5-a39a-4ce790adc338", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1189.517107] env[61978]: INFO nova.compute.manager [-] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Took 1.31 seconds to deallocate network for instance. [ 1189.572687] env[61978]: ERROR nova.scheduler.client.report [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] [req-c66ec52a-e414-445f-aa58-8161516dd9a4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 44209228-3464-48ae-bc40-83eccd44b0cf. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c66ec52a-e414-445f-aa58-8161516dd9a4"}]} [ 1189.595819] env[61978]: DEBUG nova.scheduler.client.report [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Refreshing inventories for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1189.612812] env[61978]: DEBUG nova.scheduler.client.report [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Updating ProviderTree inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1189.613253] env[61978]: DEBUG nova.compute.provider_tree [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1189.625919] env[61978]: DEBUG nova.scheduler.client.report [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Refreshing aggregate associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, aggregates: None {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1189.648502] env[61978]: DEBUG nova.scheduler.client.report [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Refreshing trait associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1189.710322] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395616, 'name': Destroy_Task, 'duration_secs': 0.490786} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.711083] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Destroyed the VM [ 1189.711083] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Deleting Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1189.713975] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fc30a410-7d12-4f9e-9eda-2cf1b3f9708e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.722012] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1189.722012] env[61978]: value = "task-1395619" [ 1189.722012] env[61978]: _type = "Task" [ 1189.722012] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.732309] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395619, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.776674] env[61978]: DEBUG nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1189.777097] env[61978]: DEBUG nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1189.777345] env[61978]: DEBUG nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1189.777638] env[61978]: DEBUG nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 
tempest-DeleteServersTestJSON-992411880-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1189.777876] env[61978]: DEBUG nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1189.778149] env[61978]: DEBUG nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1189.778523] env[61978]: DEBUG nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1189.778753] env[61978]: DEBUG nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1189.778957] env[61978]: DEBUG nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1189.779169] env[61978]: DEBUG nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1189.779360] env[61978]: DEBUG nova.virt.hardware [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1189.785198] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Reconfiguring VM instance instance-00000052 to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1189.788567] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e33eb2a5-9c25-4f31-b1c8-2b34d57c52ea {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.815558] env[61978]: INFO nova.compute.manager [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Took 15.74 seconds to build instance. 
[ 1189.818686] env[61978]: DEBUG oslo_vmware.api [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1189.818686] env[61978]: value = "task-1395620" [ 1189.818686] env[61978]: _type = "Task" [ 1189.818686] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.833880] env[61978]: DEBUG oslo_vmware.api [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395620, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.885996] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395617, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.952344] env[61978]: DEBUG oslo_vmware.api [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395618, 'name': ReconfigVM_Task, 'duration_secs': 0.216072} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.952705] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295994', 'volume_id': '96d26999-1056-47ad-b42f-1ccc31fe5872', 'name': 'volume-96d26999-1056-47ad-b42f-1ccc31fe5872', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6', 'attached_at': '', 'detached_at': '', 'volume_id': '96d26999-1056-47ad-b42f-1ccc31fe5872', 'serial': '96d26999-1056-47ad-b42f-1ccc31fe5872'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1189.975225] env[61978]: DEBUG oslo_concurrency.lockutils [req-3486dd77-df91-4a0f-a648-0be9bae92367 req-d91047b0-f1ca-4b0d-9c3d-cd7c96ebe9d6 service nova] Releasing lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1189.975576] env[61978]: DEBUG nova.compute.manager [req-3486dd77-df91-4a0f-a648-0be9bae92367 req-d91047b0-f1ca-4b0d-9c3d-cd7c96ebe9d6 service nova] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Received event network-vif-deleted-4a5f6d6c-8742-44fb-823a-a586923aaa5d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1189.975859] env[61978]: INFO nova.compute.manager [req-3486dd77-df91-4a0f-a648-0be9bae92367 req-d91047b0-f1ca-4b0d-9c3d-cd7c96ebe9d6 service nova] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Neutron deleted interface 4a5f6d6c-8742-44fb-823a-a586923aaa5d; detaching it from the instance and deleting it from the info cache [ 1189.975997] env[61978]: DEBUG nova.network.neutron [req-3486dd77-df91-4a0f-a648-0be9bae92367 
req-d91047b0-f1ca-4b0d-9c3d-cd7c96ebe9d6 service nova] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1189.995459] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73669c74-2d72-478a-b803-567ba3ebc009 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.007061] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-616b2570-9b1f-4d5d-a3d9-893894cb0914 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.053548] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.055315] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d4570c-ddc0-4356-847d-aec6f3ef27c4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.069455] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c612a4-d8f2-4b21-afce-ceac5d4f2b0f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.083914] env[61978]: DEBUG nova.compute.provider_tree [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1190.145196] env[61978]: DEBUG nova.compute.manager [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1190.172563] env[61978]: DEBUG nova.virt.hardware [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1190.172818] env[61978]: DEBUG nova.virt.hardware [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1190.172983] env[61978]: DEBUG nova.virt.hardware [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1190.173188] env[61978]: DEBUG nova.virt.hardware [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1190.173343] env[61978]: DEBUG nova.virt.hardware [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1190.173496] env[61978]: DEBUG nova.virt.hardware [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1190.173711] env[61978]: DEBUG nova.virt.hardware [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1190.173879] env[61978]: DEBUG nova.virt.hardware [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1190.174086] env[61978]: DEBUG nova.virt.hardware [None 
req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1190.174273] env[61978]: DEBUG nova.virt.hardware [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1190.174457] env[61978]: DEBUG nova.virt.hardware [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1190.175336] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd51c4b4-8241-42b6-bf80-b51cff380f93 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.184473] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175d8870-dd49-4435-98d4-a811f691236c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.205351] env[61978]: DEBUG oslo_concurrency.lockutils [None req-131c9254-04ad-4f4d-b977-2f3fe97341d0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "9ee04ee8-98ec-4be9-935d-cad7cd176466" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.283s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1190.230583] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395619, 'name': RemoveSnapshot_Task, 'duration_secs': 0.441258} completed successfully. 
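Every wait_for_task / _poll_task pair in this trace (ReconfigVM_Task, CopyVirtualDisk_Task, RemoveSnapshot_Task, and so on) follows the same shape: submit a vCenter task, then poll its state and log "progress is N%" until it either succeeds or fails. A rough, self-contained illustration of that loop with a stubbed task object; this is not the oslo.vmware implementation, and the names are placeholders:

    import time

    class TaskFailedError(Exception):
        pass

    def wait_for_task(read_task_info, poll_interval=0.5):
        """Poll a vCenter-style task until it reports success or error.

        read_task_info is any callable returning an object with
        .state ('running' | 'success' | 'error'), .progress and .error.
        """
        while True:
            info = read_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                raise TaskFailedError(info.error)
            print(f"progress is {info.progress}%")   # mirrors the DEBUG lines above
            time.sleep(poll_interval)

    # Stubbed task that finishes on the third poll.
    class FakeTask:
        def __init__(self):
            self.polls = 0
        def __call__(self):
            self.polls += 1
            done = self.polls >= 3
            return type("Info", (), {
                "state": "success" if done else "running",
                "progress": min(100, self.polls * 33),
                "error": None,
            })()

    wait_for_task(FakeTask(), poll_interval=0.01)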
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.230865] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Deleted Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1190.231178] env[61978]: DEBUG nova.compute.manager [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1190.231938] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd5a02a1-2c48-4816-aec5-adcfeea4f8d4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.328266] env[61978]: DEBUG oslo_concurrency.lockutils [None req-63a677c8-fe1f-41f0-8c61-9279fe21fd6f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "c0be687a-7444-4019-8b12-dac41a7c080e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.272s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1190.328492] env[61978]: DEBUG oslo_vmware.api [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395620, 'name': ReconfigVM_Task, 'duration_secs': 0.283415} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.328746] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Reconfigured VM instance instance-00000052 to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1190.329545] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43548294-ccf8-464a-a2c3-b3c6132f7535 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.352286] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2/b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1190.352850] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1ca0d7e-6ca8-4282-ade5-9476b498b188 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.371410] env[61978]: DEBUG oslo_vmware.api [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1190.371410] env[61978]: value = "task-1395621" [ 1190.371410] env[61978]: _type = "Task" [ 1190.371410] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.381805] env[61978]: DEBUG oslo_vmware.api [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395621, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.384701] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395617, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.828942} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.384939] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 68791dff-12e0-499d-8835-1e9173af570f/68791dff-12e0-499d-8835-1e9173af570f.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1190.385167] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1190.385402] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ca7810b6-3a18-41f6-a6ac-d16d6d2ba0e0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.391172] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1190.391172] env[61978]: value = "task-1395622" [ 1190.391172] env[61978]: _type = "Task" [ 1190.391172] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.398362] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395622, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.479202] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-20442b48-6513-4dd6-80d9-4257027068b0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.491469] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42d5ef92-4574-4b6a-b1ea-69e4f761942c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.504479] env[61978]: DEBUG nova.objects.instance [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lazy-loading 'flavor' on Instance uuid a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1190.532627] env[61978]: DEBUG nova.compute.manager [req-3486dd77-df91-4a0f-a648-0be9bae92367 req-d91047b0-f1ca-4b0d-9c3d-cd7c96ebe9d6 service nova] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Detach interface failed, port_id=4a5f6d6c-8742-44fb-823a-a586923aaa5d, reason: Instance 612aba6c-a30d-4eeb-8f85-e791bda55582 could not be found. 
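The "Extending root virtual disk to 1048576" entry above is the copied image disk being grown to the flavor's root size; the VMware driver appears to express that size in KB, so a 1 GB root disk comes out as 1 * 1024 * 1024 = 1048576. A small worked example of that conversion; the helper name is invented and the unit assumption is inferred from the numbers in this log:

    def root_disk_size_kb(root_gb):
        """Convert a flavor's root_gb into the KB figure the vmwareapi driver logs."""
        return root_gb * 1024 * 1024

    assert root_disk_size_kb(1) == 1048576   # the value seen in the log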
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1190.620202] env[61978]: DEBUG nova.scheduler.client.report [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Updated inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf with generation 111 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1190.620560] env[61978]: DEBUG nova.compute.provider_tree [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Updating resource provider 44209228-3464-48ae-bc40-83eccd44b0cf generation from 111 to 112 during operation: update_inventory {{(pid=61978) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1190.620781] env[61978]: DEBUG nova.compute.provider_tree [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 184, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1190.699095] env[61978]: DEBUG nova.compute.manager [req-6052b210-8249-4555-aa92-4c614436d4f4 req-16a892ab-ca97-4331-ae86-b09cab1cf2bd service nova] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Received event network-vif-plugged-c97a7df3-acef-43d7-9e3b-2117f142a29d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1190.699353] env[61978]: DEBUG oslo_concurrency.lockutils [req-6052b210-8249-4555-aa92-4c614436d4f4 req-16a892ab-ca97-4331-ae86-b09cab1cf2bd service nova] Acquiring lock "97e128f9-7135-46b0-b22a-ee5449ba48b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.699595] env[61978]: DEBUG oslo_concurrency.lockutils [req-6052b210-8249-4555-aa92-4c614436d4f4 req-16a892ab-ca97-4331-ae86-b09cab1cf2bd service nova] Lock "97e128f9-7135-46b0-b22a-ee5449ba48b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1190.699781] env[61978]: DEBUG oslo_concurrency.lockutils [req-6052b210-8249-4555-aa92-4c614436d4f4 req-16a892ab-ca97-4331-ae86-b09cab1cf2bd service nova] Lock "97e128f9-7135-46b0-b22a-ee5449ba48b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s 
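The scheduler report client entries above show the optimistic-concurrency model Placement relies on: each inventory update is tied to the provider generation the caller last saw, and a successful write bumps it (here 111 to 112). A toy local model of that behaviour, assuming a simplified provider record rather than the real Placement API or Nova's ProviderTree:

    class GenerationConflict(Exception):
        pass

    class Provider:
        def __init__(self, uuid, generation=0):
            self.uuid = uuid
            self.generation = generation
            self.inventory = {}

        def update_inventory(self, inventory, expected_generation):
            """Apply new inventory only if the caller saw the latest generation."""
            if expected_generation != self.generation:
                raise GenerationConflict(
                    f"provider {self.uuid} is at generation {self.generation}")
            self.inventory = dict(inventory)
            self.generation += 1          # 111 -> 112 in the log
            return self.generation

    rp = Provider("44209228-3464-48ae-bc40-83eccd44b0cf", generation=111)
    new_gen = rp.update_inventory(
        {"VCPU": {"total": 48, "allocation_ratio": 4.0},
         "MEMORY_MB": {"total": 196590, "reserved": 512},
         "DISK_GB": {"total": 400}},
        expected_generation=111)
    assert new_gen == 112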
{{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1190.699923] env[61978]: DEBUG nova.compute.manager [req-6052b210-8249-4555-aa92-4c614436d4f4 req-16a892ab-ca97-4331-ae86-b09cab1cf2bd service nova] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] No waiting events found dispatching network-vif-plugged-c97a7df3-acef-43d7-9e3b-2117f142a29d {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1190.700655] env[61978]: WARNING nova.compute.manager [req-6052b210-8249-4555-aa92-4c614436d4f4 req-16a892ab-ca97-4331-ae86-b09cab1cf2bd service nova] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Received unexpected event network-vif-plugged-c97a7df3-acef-43d7-9e3b-2117f142a29d for instance with vm_state building and task_state spawning. [ 1190.719328] env[61978]: DEBUG nova.network.neutron [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Successfully updated port: c97a7df3-acef-43d7-9e3b-2117f142a29d {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1190.744541] env[61978]: INFO nova.compute.manager [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Shelve offloading [ 1190.746546] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1190.746753] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0caa4d65-534b-408b-a712-9e3ba49b4a3f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.754666] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1190.754666] env[61978]: value = "task-1395623" [ 1190.754666] env[61978]: _type = "Task" [ 1190.754666] env[61978]: } to complete. 
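The "-events" lock traffic and the "No waiting events found dispatching network-vif-plugged-..." warning above reflect how the compute manager matches externally delivered Neutron events against waiters that a build thread may, or as here may not yet, have registered. A stripped-down sketch of such a registry using threading primitives; it only illustrates the idea, not Nova's InstanceEvents class:

    import threading

    class EventRegistry:
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}           # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            """A build thread registers interest before triggering the event source."""
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = waiter
            return waiter

        def dispatch(self, instance_uuid, event_name):
            """An incoming external event pops and wakes the waiter, if any."""
            with self._lock:
                waiter = self._waiters.pop((instance_uuid, event_name), None)
            if waiter is None:
                print(f"No waiting events found dispatching {event_name}")  # as in the log
                return False
            waiter.set()
            return True

    reg = EventRegistry()
    # Nothing registered for this VIF yet, so the dispatch falls through:
    reg.dispatch("97e128f9-7135-46b0-b22a-ee5449ba48b6",
                 "network-vif-plugged-c97a7df3-acef-43d7-9e3b-2117f142a29d")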
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.763429] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] VM already powered off {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1190.763631] env[61978]: DEBUG nova.compute.manager [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1190.764429] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff415a5b-6a4f-4939-bd7e-c9308567ca53 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.772023] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "refresh_cache-17c56c1c-9992-4559-ad23-c68909ae6792" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1190.772271] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquired lock "refresh_cache-17c56c1c-9992-4559-ad23-c68909ae6792" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.772789] env[61978]: DEBUG nova.network.neutron [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1190.784710] env[61978]: DEBUG oslo_concurrency.lockutils [None req-91d78e38-febc-47a7-b7b4-2b6e80f19e9a tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.784948] env[61978]: DEBUG oslo_concurrency.lockutils [None req-91d78e38-febc-47a7-b7b4-2b6e80f19e9a tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1190.881793] env[61978]: DEBUG oslo_vmware.api [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395621, 'name': ReconfigVM_Task, 'duration_secs': 0.2791} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.882089] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Reconfigured VM instance instance-00000052 to attach disk [datastore2] b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2/b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1190.882365] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Updating instance 'b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2' progress to 50 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1190.901929] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395622, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.133281} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.902204] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1190.902856] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a89f411-8e9d-4094-bb46-1bbdab40238b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.925658] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] 68791dff-12e0-499d-8835-1e9173af570f/68791dff-12e0-499d-8835-1e9173af570f.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1190.925960] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1203cb42-1619-4c8d-a198-f524a8488ccf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.947980] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1190.947980] env[61978]: value = "task-1395624" [ 1190.947980] env[61978]: _type = "Task" [ 1190.947980] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.955977] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395624, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.011902] env[61978]: DEBUG oslo_concurrency.lockutils [None req-598886b9-b2fc-4674-b39d-69782df85062 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.295s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1191.127163] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.002s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1191.129596] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.197s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1191.129795] env[61978]: DEBUG nova.objects.instance [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61978) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1191.156258] env[61978]: INFO nova.scheduler.client.report [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Deleted allocations for instance 5d1d19d8-241b-41b8-b1c0-caf54f8fd600 [ 1191.222990] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "refresh_cache-97e128f9-7135-46b0-b22a-ee5449ba48b6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1191.222990] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "refresh_cache-97e128f9-7135-46b0-b22a-ee5449ba48b6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.222990] env[61978]: DEBUG nova.network.neutron [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Building network info cache for instance {{(pid=61978) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1191.288196] env[61978]: INFO nova.compute.manager [None req-91d78e38-febc-47a7-b7b4-2b6e80f19e9a tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Detaching volume 861ed39b-543d-436d-a50d-364cfadf8e50 [ 1191.334587] env[61978]: INFO nova.virt.block_device [None req-91d78e38-febc-47a7-b7b4-2b6e80f19e9a tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Attempting to driver detach volume 861ed39b-543d-436d-a50d-364cfadf8e50 from mountpoint /dev/sdb [ 1191.334756] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-91d78e38-febc-47a7-b7b4-2b6e80f19e9a tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Volume detach. Driver type: vmdk {{(pid=61978) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1191.334957] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-91d78e38-febc-47a7-b7b4-2b6e80f19e9a tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295980', 'volume_id': '861ed39b-543d-436d-a50d-364cfadf8e50', 'name': 'volume-861ed39b-543d-436d-a50d-364cfadf8e50', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ae6b92bb-6f79-4b52-bdb7-095985bf2fad', 'attached_at': '', 'detached_at': '', 'volume_id': '861ed39b-543d-436d-a50d-364cfadf8e50', 'serial': '861ed39b-543d-436d-a50d-364cfadf8e50'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1191.335887] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c678dc00-5287-464e-958c-705b48f3e38d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.361769] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b8f791-97b8-4d59-aed9-7ae0a9ec5011 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.364538] env[61978]: DEBUG oslo_concurrency.lockutils [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "9ee04ee8-98ec-4be9-935d-cad7cd176466" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1191.364769] env[61978]: DEBUG oslo_concurrency.lockutils [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "9ee04ee8-98ec-4be9-935d-cad7cd176466" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1191.364978] env[61978]: DEBUG oslo_concurrency.lockutils [None req-735bdc12-3171-4970-a796-914a6bf99403 
tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "9ee04ee8-98ec-4be9-935d-cad7cd176466-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1191.365187] env[61978]: DEBUG oslo_concurrency.lockutils [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "9ee04ee8-98ec-4be9-935d-cad7cd176466-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1191.365368] env[61978]: DEBUG oslo_concurrency.lockutils [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "9ee04ee8-98ec-4be9-935d-cad7cd176466-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1191.368775] env[61978]: INFO nova.compute.manager [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Terminating instance [ 1191.370471] env[61978]: DEBUG nova.compute.manager [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Start destroying the instance on the hypervisor. 
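Nearly every lockutils line in this trace carries two timings: how long the caller waited to acquire the named lock and how long it was held before release (for example "held 3.283s" on the detach_volume lock earlier). A small context manager reproducing that style of accounting, purely as an illustration of the pattern rather than oslo.concurrency itself:

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}
    _registry_lock = threading.Lock()

    @contextmanager
    def timed_lock(name, owner):
        with _registry_lock:
            lock = _locks.setdefault(name, threading.Lock())
        start = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - start
        print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        held_from = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - held_from
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

    with timed_lock("compute_resources", "example.update_usage"):
        time.sleep(0.01)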
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1191.370690] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1191.371462] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a0107f1-115b-419c-98ef-62afd533b186 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.376279] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140a25c5-c3b1-4e39-b0c5-7dcba10e9763 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.380319] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1191.380823] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-06146155-90e4-49a8-ae00-1a21e29ca08f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.403410] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc64eae-e1ce-4f4b-bbf0-4a770a1c529d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.406815] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1211669-317d-4f59-bb86-a7012792a880 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.409410] env[61978]: DEBUG oslo_vmware.api [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1191.409410] env[61978]: value = "task-1395625" [ 1191.409410] env[61978]: _type = "Task" [ 1191.409410] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.435835] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-91d78e38-febc-47a7-b7b4-2b6e80f19e9a tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] The volume has not been displaced from its original location: [datastore2] volume-861ed39b-543d-436d-a50d-364cfadf8e50/volume-861ed39b-543d-436d-a50d-364cfadf8e50.vmdk. No consolidation needed. 
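Before detaching the vmdk-backed volume, the driver checks whether the backing file still sits at the path recorded for it; only if the VM has displaced it does a consolidation step become necessary, and here the log reports "No consolidation needed." A simplified sketch of that decision, reusing the connection_info shape shown above; the helper name and the assumed datastore layout are illustrative:

    def needs_consolidation(connection_info, current_device_path, datastore="datastore2"):
        """Return True when the backing file no longer sits at its original location."""
        volume_name = connection_info["data"]["name"]
        original_path = f"[{datastore}] {volume_name}/{volume_name}.vmdk"  # assumed layout
        return current_device_path != original_path

    connection_info = {
        "driver_volume_type": "vmdk",
        "data": {"name": "volume-861ed39b-543d-436d-a50d-364cfadf8e50",
                 "volume_id": "861ed39b-543d-436d-a50d-364cfadf8e50"},
    }
    current = ("[datastore2] volume-861ed39b-543d-436d-a50d-364cfadf8e50/"
               "volume-861ed39b-543d-436d-a50d-364cfadf8e50.vmdk")
    print(needs_consolidation(connection_info, current))   # False: no consolidation needed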
{{(pid=61978) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1191.441196] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-91d78e38-febc-47a7-b7b4-2b6e80f19e9a tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Reconfiguring VM instance instance-00000046 to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1191.442038] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-610bd741-2794-49f7-8e8b-edca27b35c0e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.455591] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6438eb-73f1-421a-a9de-90f44ae22c0d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.463211] env[61978]: DEBUG oslo_vmware.api [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395625, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.471792] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395624, 'name': ReconfigVM_Task, 'duration_secs': 0.301117} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.486090] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Reconfigured VM instance instance-00000056 to attach disk [datastore2] 68791dff-12e0-499d-8835-1e9173af570f/68791dff-12e0-499d-8835-1e9173af570f.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1191.486924] env[61978]: DEBUG oslo_vmware.api [None req-91d78e38-febc-47a7-b7b4-2b6e80f19e9a tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1191.486924] env[61978]: value = "task-1395626" [ 1191.486924] env[61978]: _type = "Task" [ 1191.486924] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.487244] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Updating instance 'b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2' progress to 67 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1191.490863] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-19f1f827-b3b2-41a1-a1ba-3e72fedcc412 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.504887] env[61978]: DEBUG oslo_vmware.api [None req-91d78e38-febc-47a7-b7b4-2b6e80f19e9a tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395626, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.506377] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1191.506377] env[61978]: value = "task-1395627" [ 1191.506377] env[61978]: _type = "Task" [ 1191.506377] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.517133] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395627, 'name': Rename_Task} progress is 10%. 
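The "progress to 50" and "progress to 67" updates on instance b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2 are consistent with a fixed-step migration, where each completed step advances the instance's progress by an equal share of 100%: with six total steps, step 3 reports 50% and step 4 rounds to 67%. A one-line sketch of that arithmetic; the step count of six is an assumption inferred from the numbers in this log:

    def migration_progress(step, total_steps=6):
        """Percentage reported after completing `step` of `total_steps`."""
        return round(step * 100 / total_steps)

    assert migration_progress(3) == 50
    assert migration_progress(4) == 67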
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.615320] env[61978]: DEBUG nova.compute.manager [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1191.616722] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27867de-2825-4df3-97d7-fa72a89e9652 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.647365] env[61978]: DEBUG nova.network.neutron [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Updating instance_info_cache with network_info: [{"id": "bba3eeec-259f-4ea3-b0f6-e509a29d33f4", "address": "fa:16:3e:1e:db:c1", "network": {"id": "aa63ec3f-fde3-49f2-ab12-71ae85601428", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-445619089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "198eab494c0a4e0eb83bae5732df9c78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "650f7968-4522-4ba5-8304-1b9949951ed7", "external-id": "nsx-vlan-transportzone-568", "segmentation_id": 568, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbba3eeec-25", "ovs_interfaceid": "bba3eeec-259f-4ea3-b0f6-e509a29d33f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.665718] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4646347a-95a6-4972-9791-2fb21c21c118 tempest-InstanceActionsV221TestJSON-529772804 tempest-InstanceActionsV221TestJSON-529772804-project-member] Lock "5d1d19d8-241b-41b8-b1c0-caf54f8fd600" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.624s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1191.770413] env[61978]: DEBUG nova.network.neutron [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1191.923552] env[61978]: DEBUG oslo_vmware.api [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395625, 'name': PowerOffVM_Task, 'duration_secs': 0.195853} completed successfully. 
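The instance_info_cache payloads above are lists of VIF dictionaries: each carries the port id, MAC address and device name, plus a nested network with its subnets and fixed IPs. A short helper showing how such a payload can be walked to pull out the fixed addresses; this is illustrative only, with field names taken from the blob above:

    def fixed_ips(network_info):
        """Collect (device, address) pairs from a network_info cache entry."""
        pairs = []
        for vif in network_info:
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    if ip["type"] == "fixed":
                        pairs.append((vif["devname"], ip["address"]))
        return pairs

    cached = [{
        "id": "bba3eeec-259f-4ea3-b0f6-e509a29d33f4",
        "devname": "tapbba3eeec-25",
        "network": {"subnets": [{"ips": [{"type": "fixed",
                                          "address": "192.168.128.12"}]}]},
    }]
    print(fixed_ips(cached))   # [('tapbba3eeec-25', '192.168.128.12')]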
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.924995] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1191.924995] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1191.926430] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-184a192c-84dd-498a-a77d-3bf282ca81ca {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.980276] env[61978]: DEBUG nova.network.neutron [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Updating instance_info_cache with network_info: [{"id": "c97a7df3-acef-43d7-9e3b-2117f142a29d", "address": "fa:16:3e:61:9c:64", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc97a7df3-ac", "ovs_interfaceid": "c97a7df3-acef-43d7-9e3b-2117f142a29d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.015155] env[61978]: DEBUG oslo_vmware.api [None req-91d78e38-febc-47a7-b7b4-2b6e80f19e9a tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395626, 'name': ReconfigVM_Task, 'duration_secs': 0.260946} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.015849] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-91d78e38-febc-47a7-b7b4-2b6e80f19e9a tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Reconfigured VM instance instance-00000046 to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1192.028380] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c38822db-5cec-4cc9-a67e-fae972d6da6c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.040411] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395627, 'name': Rename_Task, 'duration_secs': 0.157156} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.041094] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1192.041406] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1192.041603] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1192.041788] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Deleting the datastore file [datastore2] 9ee04ee8-98ec-4be9-935d-cad7cd176466 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1192.042401] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8712cbd5-4021-4424-a10f-13d1757d0f93 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.044761] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dc20c181-0013-4702-bdb8-14e89104f5f5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.047714] env[61978]: DEBUG oslo_vmware.api [None req-91d78e38-febc-47a7-b7b4-2b6e80f19e9a tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1192.047714] env[61978]: value = "task-1395629" [ 1192.047714] env[61978]: _type = "Task" [ 
1192.047714] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.054112] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1192.054112] env[61978]: value = "task-1395630" [ 1192.054112] env[61978]: _type = "Task" [ 1192.054112] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.054396] env[61978]: DEBUG oslo_vmware.api [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1192.054396] env[61978]: value = "task-1395631" [ 1192.054396] env[61978]: _type = "Task" [ 1192.054396] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.065734] env[61978]: DEBUG oslo_vmware.api [None req-91d78e38-febc-47a7-b7b4-2b6e80f19e9a tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395629, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.070847] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395630, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.074605] env[61978]: DEBUG oslo_vmware.api [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395631, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.083953] env[61978]: DEBUG oslo_concurrency.lockutils [None req-67f5093c-dd19-48c6-ab50-f2a104e55f65 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.085657] env[61978]: DEBUG oslo_concurrency.lockutils [None req-67f5093c-dd19-48c6-ab50-f2a104e55f65 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.088193] env[61978]: DEBUG nova.network.neutron [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Port 2efcc135-18f4-45d3-9408-817cdbada770 binding to destination host cpu-1 is already ACTIVE {{(pid=61978) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1192.128623] env[61978]: INFO nova.compute.manager [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] instance snapshotting [ 1192.136743] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-459b73bc-32a3-4ff4-8432-723e7245bb59 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.144478] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ba240b63-5504-47b0-a8bb-87d141681d61 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.145695] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.092s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.146067] env[61978]: DEBUG nova.objects.instance [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lazy-loading 'resources' on Instance uuid 612aba6c-a30d-4eeb-8f85-e791bda55582 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1192.149679] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Releasing lock "refresh_cache-17c56c1c-9992-4559-ad23-c68909ae6792" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
1192.174123] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c03abfb8-124d-48bb-ad57-cfdddae72564 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.484501] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "refresh_cache-97e128f9-7135-46b0-b22a-ee5449ba48b6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1192.484938] env[61978]: DEBUG nova.compute.manager [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Instance network_info: |[{"id": "c97a7df3-acef-43d7-9e3b-2117f142a29d", "address": "fa:16:3e:61:9c:64", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc97a7df3-ac", "ovs_interfaceid": "c97a7df3-acef-43d7-9e3b-2117f142a29d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1192.486105] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:9c:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c97a7df3-acef-43d7-9e3b-2117f142a29d', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1192.496552] env[61978]: DEBUG oslo.service.loopingcall [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1192.496896] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1192.497788] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3402f861-444a-48ab-8413-ddec3360d71f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.520849] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1192.520849] env[61978]: value = "task-1395632" [ 1192.520849] env[61978]: _type = "Task" [ 1192.520849] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.530265] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395632, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.567308] env[61978]: DEBUG oslo_vmware.api [None req-91d78e38-febc-47a7-b7b4-2b6e80f19e9a tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395629, 'name': ReconfigVM_Task, 'duration_secs': 0.174384} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.569120] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-91d78e38-febc-47a7-b7b4-2b6e80f19e9a tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295980', 'volume_id': '861ed39b-543d-436d-a50d-364cfadf8e50', 'name': 'volume-861ed39b-543d-436d-a50d-364cfadf8e50', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ae6b92bb-6f79-4b52-bdb7-095985bf2fad', 'attached_at': '', 'detached_at': '', 'volume_id': '861ed39b-543d-436d-a50d-364cfadf8e50', 'serial': '861ed39b-543d-436d-a50d-364cfadf8e50'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1192.580245] env[61978]: DEBUG oslo_vmware.api [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395631, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.298627} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.581926] env[61978]: DEBUG oslo_vmware.api [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395630, 'name': PowerOnVM_Task} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.581926] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1192.581926] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1192.581926] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1192.581926] env[61978]: INFO nova.compute.manager [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1192.581926] env[61978]: DEBUG oslo.service.loopingcall [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1192.581926] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1192.582420] env[61978]: INFO nova.compute.manager [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Took 9.27 seconds to spawn the instance on the hypervisor. 
[ 1192.582689] env[61978]: DEBUG nova.compute.manager [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1192.583230] env[61978]: DEBUG nova.compute.manager [-] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1192.583364] env[61978]: DEBUG nova.network.neutron [-] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1192.586404] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0b1051-6d34-4693-a06c-6db07a814696 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.592203] env[61978]: DEBUG nova.compute.utils [None req-67f5093c-dd19-48c6-ab50-f2a104e55f65 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1192.691709] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Creating Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1192.693080] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a1f6c0f4-be0b-44f5-aae5-40315aaa9268 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.699812] env[61978]: DEBUG oslo_vmware.api [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1192.699812] env[61978]: value = "task-1395633" [ 1192.699812] env[61978]: _type = "Task" [ 1192.699812] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.708915] env[61978]: DEBUG oslo_vmware.api [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395633, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.934816] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1192.934816] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a9b40a-f6e1-4bca-8c38-d88333162926 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.942079] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1192.942688] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2b7ba6b3-b8c7-4ceb-91eb-e4ac45ffdff0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.946588] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-785b1d8e-e0a7-425b-b540-36185ca93132 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.957241] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0485dcf0-82cb-4968-9a0c-652d6ec091a0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.998143] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04dd5eac-bec5-4b03-89bb-9b3de5099db7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.006335] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d208f6a6-ea9c-4e6b-b1a6-944debf2aff1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.021870] env[61978]: DEBUG nova.compute.provider_tree [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1193.028739] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1193.028991] env[61978]: 
DEBUG nova.virt.vmwareapi.vmops [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1193.029218] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Deleting the datastore file [datastore1] 17c56c1c-9992-4559-ad23-c68909ae6792 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1193.029506] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dcf3a054-a009-49c0-b175-63b6e0eb1724 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.036908] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395632, 'name': CreateVM_Task, 'duration_secs': 0.362164} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.038775] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1193.039148] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1193.039148] env[61978]: value = "task-1395635" [ 1193.039148] env[61978]: _type = "Task" [ 1193.039148] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.039816] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1193.039979] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.040472] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1193.040905] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b407e0c5-3b31-4223-a6f0-31c6673a7378 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.049118] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1193.049118] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cea58e-99d1-3b0a-cebc-aec4064ed3a5" [ 1193.049118] env[61978]: _type = "Task" [ 1193.049118] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.052724] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395635, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.061319] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cea58e-99d1-3b0a-cebc-aec4064ed3a5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.112401] env[61978]: DEBUG oslo_concurrency.lockutils [None req-67f5093c-dd19-48c6-ab50-f2a104e55f65 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.027s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.123853] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.127719] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.127719] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.131203] env[61978]: INFO nova.compute.manager [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Took 15.42 seconds to build instance. [ 1193.157461] env[61978]: DEBUG nova.objects.instance [None req-91d78e38-febc-47a7-b7b4-2b6e80f19e9a tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lazy-loading 'flavor' on Instance uuid ae6b92bb-6f79-4b52-bdb7-095985bf2fad {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1193.211679] env[61978]: DEBUG oslo_vmware.api [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395633, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.232270] env[61978]: DEBUG nova.compute.manager [req-6d0135b5-f8e4-4d8d-9006-845adf9f7479 req-c3a901a0-1fe9-40ea-86de-3fea303151cc service nova] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Received event network-changed-c97a7df3-acef-43d7-9e3b-2117f142a29d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1193.232605] env[61978]: DEBUG nova.compute.manager [req-6d0135b5-f8e4-4d8d-9006-845adf9f7479 req-c3a901a0-1fe9-40ea-86de-3fea303151cc service nova] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Refreshing instance network info cache due to event network-changed-c97a7df3-acef-43d7-9e3b-2117f142a29d. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1193.232979] env[61978]: DEBUG oslo_concurrency.lockutils [req-6d0135b5-f8e4-4d8d-9006-845adf9f7479 req-c3a901a0-1fe9-40ea-86de-3fea303151cc service nova] Acquiring lock "refresh_cache-97e128f9-7135-46b0-b22a-ee5449ba48b6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1193.233255] env[61978]: DEBUG oslo_concurrency.lockutils [req-6d0135b5-f8e4-4d8d-9006-845adf9f7479 req-c3a901a0-1fe9-40ea-86de-3fea303151cc service nova] Acquired lock "refresh_cache-97e128f9-7135-46b0-b22a-ee5449ba48b6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.233511] env[61978]: DEBUG nova.network.neutron [req-6d0135b5-f8e4-4d8d-9006-845adf9f7479 req-c3a901a0-1fe9-40ea-86de-3fea303151cc service nova] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Refreshing network info cache for port c97a7df3-acef-43d7-9e3b-2117f142a29d {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1193.550386] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395635, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.564297] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cea58e-99d1-3b0a-cebc-aec4064ed3a5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.568515] env[61978]: DEBUG nova.scheduler.client.report [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Updated inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf with generation 112 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1193.568777] env[61978]: DEBUG nova.compute.provider_tree [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Updating resource provider 44209228-3464-48ae-bc40-83eccd44b0cf generation from 112 to 113 during operation: update_inventory {{(pid=61978) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1193.568965] env[61978]: DEBUG nova.compute.provider_tree [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1193.636343] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2e0f31ca-5c5b-4240-b0d5-f08997aadf5c tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "68791dff-12e0-499d-8835-1e9173af570f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.982s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.681527] env[61978]: DEBUG nova.objects.instance [None req-ce6c2348-b743-433f-8606-5a0c11cd98a3 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Lazy-loading 'flavor' on Instance uuid 1eae10e8-58b1-435d-86fc-0674725ce6cd {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1193.713936] env[61978]: DEBUG oslo_vmware.api [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395633, 'name': CreateSnapshot_Task, 'duration_secs': 0.930566} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.714793] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Created Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1193.715914] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-703e898e-8960-4e63-a017-6d3e8d622f3b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.998765] env[61978]: DEBUG nova.network.neutron [req-6d0135b5-f8e4-4d8d-9006-845adf9f7479 req-c3a901a0-1fe9-40ea-86de-3fea303151cc service nova] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Updated VIF entry in instance network info cache for port c97a7df3-acef-43d7-9e3b-2117f142a29d. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1193.999532] env[61978]: DEBUG nova.network.neutron [req-6d0135b5-f8e4-4d8d-9006-845adf9f7479 req-c3a901a0-1fe9-40ea-86de-3fea303151cc service nova] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Updating instance_info_cache with network_info: [{"id": "c97a7df3-acef-43d7-9e3b-2117f142a29d", "address": "fa:16:3e:61:9c:64", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc97a7df3-ac", "ovs_interfaceid": "c97a7df3-acef-43d7-9e3b-2117f142a29d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.013547] env[61978]: DEBUG nova.network.neutron [-] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.052794] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395635, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.061344] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cea58e-99d1-3b0a-cebc-aec4064ed3a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.073685] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.928s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.109895] env[61978]: INFO nova.scheduler.client.report [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Deleted allocations for instance 612aba6c-a30d-4eeb-8f85-e791bda55582 [ 1194.163660] env[61978]: DEBUG oslo_concurrency.lockutils [None req-91d78e38-febc-47a7-b7b4-2b6e80f19e9a tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.378s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.168961] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "refresh_cache-b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1194.169226] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "refresh_cache-b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.169851] env[61978]: DEBUG nova.network.neutron [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1194.179018] env[61978]: DEBUG oslo_concurrency.lockutils [None req-67f5093c-dd19-48c6-ab50-f2a104e55f65 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.179018] env[61978]: DEBUG oslo_concurrency.lockutils [None req-67f5093c-dd19-48c6-ab50-f2a104e55f65 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: 
waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.179018] env[61978]: INFO nova.compute.manager [None req-67f5093c-dd19-48c6-ab50-f2a104e55f65 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Attaching volume 6f97516a-a581-42c8-8158-14d54c5b9874 to /dev/sdc [ 1194.186906] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ce6c2348-b743-433f-8606-5a0c11cd98a3 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Acquiring lock "refresh_cache-1eae10e8-58b1-435d-86fc-0674725ce6cd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1194.187506] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ce6c2348-b743-433f-8606-5a0c11cd98a3 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Acquired lock "refresh_cache-1eae10e8-58b1-435d-86fc-0674725ce6cd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.235137] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Creating linked-clone VM from snapshot {{(pid=61978) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1194.238179] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7a1ae954-0acf-4727-828e-7865f1ecc0cd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.242771] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ac5212-c674-42e3-a38a-f7d7a8fb0800 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.252514] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd16cda-aca5-4633-99e9-bf740a8783f0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.256775] env[61978]: DEBUG oslo_vmware.api [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1194.256775] env[61978]: value = "task-1395636" [ 1194.256775] env[61978]: _type = "Task" [ 1194.256775] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.272138] env[61978]: DEBUG nova.virt.block_device [None req-67f5093c-dd19-48c6-ab50-f2a104e55f65 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Updating existing volume attachment record: 72fe215f-759b-45a0-a221-6e3166f7edf9 {{(pid=61978) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1194.280132] env[61978]: DEBUG oslo_vmware.api [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395636, 'name': CloneVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.505115] env[61978]: DEBUG oslo_concurrency.lockutils [req-6d0135b5-f8e4-4d8d-9006-845adf9f7479 req-c3a901a0-1fe9-40ea-86de-3fea303151cc service nova] Releasing lock "refresh_cache-97e128f9-7135-46b0-b22a-ee5449ba48b6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1194.505441] env[61978]: DEBUG nova.compute.manager [req-6d0135b5-f8e4-4d8d-9006-845adf9f7479 req-c3a901a0-1fe9-40ea-86de-3fea303151cc service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Received event network-vif-unplugged-bba3eeec-259f-4ea3-b0f6-e509a29d33f4 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1194.505653] env[61978]: DEBUG oslo_concurrency.lockutils [req-6d0135b5-f8e4-4d8d-9006-845adf9f7479 req-c3a901a0-1fe9-40ea-86de-3fea303151cc service nova] Acquiring lock "17c56c1c-9992-4559-ad23-c68909ae6792-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.505954] env[61978]: DEBUG oslo_concurrency.lockutils [req-6d0135b5-f8e4-4d8d-9006-845adf9f7479 req-c3a901a0-1fe9-40ea-86de-3fea303151cc service nova] Lock "17c56c1c-9992-4559-ad23-c68909ae6792-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.506103] env[61978]: DEBUG oslo_concurrency.lockutils [req-6d0135b5-f8e4-4d8d-9006-845adf9f7479 req-c3a901a0-1fe9-40ea-86de-3fea303151cc service nova] Lock "17c56c1c-9992-4559-ad23-c68909ae6792-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.506278] env[61978]: DEBUG nova.compute.manager [req-6d0135b5-f8e4-4d8d-9006-845adf9f7479 req-c3a901a0-1fe9-40ea-86de-3fea303151cc service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] No waiting events found dispatching network-vif-unplugged-bba3eeec-259f-4ea3-b0f6-e509a29d33f4 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1194.506480] env[61978]: WARNING nova.compute.manager [req-6d0135b5-f8e4-4d8d-9006-845adf9f7479 req-c3a901a0-1fe9-40ea-86de-3fea303151cc service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Received unexpected event network-vif-unplugged-bba3eeec-259f-4ea3-b0f6-e509a29d33f4 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1194.516777] env[61978]: INFO nova.compute.manager [-] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Took 1.93 seconds to deallocate network for instance. [ 1194.553536] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395635, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.563120] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cea58e-99d1-3b0a-cebc-aec4064ed3a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.620027] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c5b434ad-8b49-4984-b62b-66d2f1fc10eb tempest-ImagesTestJSON-1872689461 tempest-ImagesTestJSON-1872689461-project-member] Lock "612aba6c-a30d-4eeb-8f85-e791bda55582" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.079s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.730305] env[61978]: DEBUG nova.network.neutron [None req-ce6c2348-b743-433f-8606-5a0c11cd98a3 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1194.768357] env[61978]: DEBUG oslo_vmware.api [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395636, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.879270] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.879534] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.879749] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.879939] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.880136] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.882468] env[61978]: INFO nova.compute.manager [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Terminating instance [ 1194.884352] env[61978]: DEBUG nova.compute.manager [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1194.884573] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1194.885789] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ac8666-28a6-4b75-9439-8d78e000d246 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.893513] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1194.893869] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f07b71db-d37f-4982-9202-1b1ca4afa296 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.900040] env[61978]: DEBUG oslo_vmware.api [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1194.900040] env[61978]: value = "task-1395638" [ 1194.900040] env[61978]: _type = "Task" [ 1194.900040] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.907386] env[61978]: DEBUG oslo_vmware.api [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395638, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.027795] env[61978]: DEBUG oslo_concurrency.lockutils [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.028261] env[61978]: DEBUG oslo_concurrency.lockutils [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1195.028516] env[61978]: DEBUG nova.objects.instance [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lazy-loading 'resources' on Instance uuid 9ee04ee8-98ec-4be9-935d-cad7cd176466 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1195.057435] env[61978]: DEBUG oslo_vmware.api [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395635, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.683588} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.061383] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1195.061638] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1195.061844] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1195.066814] env[61978]: DEBUG nova.network.neutron [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Updating instance_info_cache with network_info: [{"id": "2efcc135-18f4-45d3-9408-817cdbada770", "address": "fa:16:3e:8c:cc:2c", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": 
[]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2efcc135-18", "ovs_interfaceid": "2efcc135-18f4-45d3-9408-817cdbada770", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.068989] env[61978]: INFO nova.compute.manager [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Rebuilding instance [ 1195.081763] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cea58e-99d1-3b0a-cebc-aec4064ed3a5, 'name': SearchDatastore_Task, 'duration_secs': 1.520619} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.083090] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1195.083368] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1195.083925] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1195.083925] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.084079] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1195.084724] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c1e8a9f-75d6-4aef-8361-25dddaa1cf90 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.098016] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1195.098016] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1195.099456] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee9974f6-bbb8-4d25-94a2-eb51952cbc48 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.106681] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1195.106681] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f3049e-87c8-49ab-e21f-6727d08f432f" [ 1195.106681] env[61978]: _type = "Task" [ 1195.106681] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.119847] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f3049e-87c8-49ab-e21f-6727d08f432f, 'name': SearchDatastore_Task, 'duration_secs': 0.010553} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.121886] env[61978]: INFO nova.scheduler.client.report [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Deleted allocations for instance 17c56c1c-9992-4559-ad23-c68909ae6792 [ 1195.129429] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f124ed9-e3d5-482c-a2f4-593557c7470d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.135955] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1195.135955] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]522dca51-a5dc-87d5-1fbc-3331dc4da1e8" [ 1195.135955] env[61978]: _type = "Task" [ 1195.135955] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.145439] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522dca51-a5dc-87d5-1fbc-3331dc4da1e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.153160] env[61978]: DEBUG nova.compute.manager [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1195.154071] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-894629de-727b-483c-ae2d-2d54123ebc6e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.271968] env[61978]: DEBUG oslo_vmware.api [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395636, 'name': CloneVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.411277] env[61978]: DEBUG oslo_vmware.api [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395638, 'name': PowerOffVM_Task, 'duration_secs': 0.273603} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.413132] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1195.413545] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1195.416893] env[61978]: DEBUG nova.compute.manager [req-6d52748f-1945-4129-bf87-13b2a32a165d req-91923bd9-5e33-4564-a4fe-5949f01c5885 service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Received event network-changed-bba3eeec-259f-4ea3-b0f6-e509a29d33f4 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1195.416893] env[61978]: DEBUG nova.compute.manager [req-6d52748f-1945-4129-bf87-13b2a32a165d req-91923bd9-5e33-4564-a4fe-5949f01c5885 service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Refreshing instance network info cache due to event network-changed-bba3eeec-259f-4ea3-b0f6-e509a29d33f4. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1195.416893] env[61978]: DEBUG oslo_concurrency.lockutils [req-6d52748f-1945-4129-bf87-13b2a32a165d req-91923bd9-5e33-4564-a4fe-5949f01c5885 service nova] Acquiring lock "refresh_cache-17c56c1c-9992-4559-ad23-c68909ae6792" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1195.416893] env[61978]: DEBUG oslo_concurrency.lockutils [req-6d52748f-1945-4129-bf87-13b2a32a165d req-91923bd9-5e33-4564-a4fe-5949f01c5885 service nova] Acquired lock "refresh_cache-17c56c1c-9992-4559-ad23-c68909ae6792" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.417382] env[61978]: DEBUG nova.network.neutron [req-6d52748f-1945-4129-bf87-13b2a32a165d req-91923bd9-5e33-4564-a4fe-5949f01c5885 service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Refreshing network info cache for port bba3eeec-259f-4ea3-b0f6-e509a29d33f4 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1195.418809] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c7cce11a-2159-40ec-a14d-9a30d2628fa8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.507856] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1195.509071] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1195.509458] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Deleting the datastore file [datastore2] ae6b92bb-6f79-4b52-bdb7-095985bf2fad {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1195.509727] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0af0c599-8065-4ff0-8da2-0cae733f118f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.517849] env[61978]: DEBUG oslo_vmware.api [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1195.517849] env[61978]: value = "task-1395640" [ 1195.517849] env[61978]: _type = "Task" [ 1195.517849] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.529541] env[61978]: DEBUG oslo_vmware.api [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395640, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.574231] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "refresh_cache-b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1195.630788] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.646654] env[61978]: DEBUG nova.network.neutron [None req-ce6c2348-b743-433f-8606-5a0c11cd98a3 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Updating instance_info_cache with network_info: [{"id": "63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8", "address": "fa:16:3e:79:9a:37", "network": {"id": "74e44162-dbb7-4a19-b344-6133915c4ab5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-893790941-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a1c323dddcd42809d565f46ecf5e18f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63ba2eb7-45", "ovs_interfaceid": "63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.659157] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522dca51-a5dc-87d5-1fbc-3331dc4da1e8, 'name': SearchDatastore_Task, 'duration_secs': 0.009169} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.659659] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1195.659989] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 97e128f9-7135-46b0-b22a-ee5449ba48b6/97e128f9-7135-46b0-b22a-ee5449ba48b6.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1195.660530] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-70cea9d7-86f5-4a39-a812-cabd0477d22d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.667855] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1195.668391] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-801d6a98-cea7-4dc2-b2d6-9d38ca3366ee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.675961] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1195.675961] env[61978]: value = "task-1395641" [ 1195.675961] env[61978]: _type = "Task" [ 1195.675961] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.680700] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1195.680700] env[61978]: value = "task-1395642" [ 1195.680700] env[61978]: _type = "Task" [ 1195.680700] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.691198] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395641, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.697983] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395642, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.713850] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Acquiring lock "7d388d5c-2120-4dc5-a04f-5394e1e6f852" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.714192] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Lock "7d388d5c-2120-4dc5-a04f-5394e1e6f852" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1195.714448] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Acquiring lock "7d388d5c-2120-4dc5-a04f-5394e1e6f852-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.714651] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Lock "7d388d5c-2120-4dc5-a04f-5394e1e6f852-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1195.714842] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Lock "7d388d5c-2120-4dc5-a04f-5394e1e6f852-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1195.718748] env[61978]: INFO nova.compute.manager [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Terminating instance [ 1195.723097] env[61978]: DEBUG nova.compute.manager [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1195.723097] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1195.723564] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca81e3e1-1c47-4a10-96f9-cea7b0f310bb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.735024] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1195.735285] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b73ee3a0-3f15-4b90-8877-dcb66cfbb627 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.744805] env[61978]: DEBUG oslo_vmware.api [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Waiting for the task: (returnval){ [ 1195.744805] env[61978]: value = "task-1395643" [ 1195.744805] env[61978]: _type = "Task" [ 1195.744805] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.755268] env[61978]: DEBUG oslo_vmware.api [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': task-1395643, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.770478] env[61978]: DEBUG oslo_vmware.api [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395636, 'name': CloneVM_Task, 'duration_secs': 1.110186} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.770478] env[61978]: INFO nova.virt.vmwareapi.vmops [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Created linked-clone VM from snapshot [ 1195.772034] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad3627a-2f99-4118-83a4-c949f1b98077 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.782021] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Uploading image 244c307e-67dc-4161-9e24-d650ad818691 {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1195.801080] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Destroying the VM {{(pid=61978) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1195.801436] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-cebc6c83-f295-45e8-b298-13ca7bc974fc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.812902] env[61978]: DEBUG oslo_vmware.api [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1195.812902] env[61978]: value = "task-1395644" [ 1195.812902] env[61978]: _type = "Task" [ 1195.812902] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.819707] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f91856a-8261-4728-afe9-6d9d26d1bd33 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.825295] env[61978]: DEBUG oslo_vmware.api [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395644, 'name': Destroy_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.830210] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7c0787-d8c2-4e42-88f6-bf8f192e0039 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.863068] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df042afa-9cbf-4d9d-852e-0d17a48c3e56 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.870470] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1692b36-a2bc-4b3c-939c-d15c7c5db4a0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.885332] env[61978]: DEBUG nova.compute.provider_tree [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1196.029995] env[61978]: DEBUG oslo_vmware.api [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395640, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.340047} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.030369] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1196.030580] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1196.030760] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1196.030944] env[61978]: INFO nova.compute.manager [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1196.031225] env[61978]: DEBUG oslo.service.loopingcall [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1196.031438] env[61978]: DEBUG nova.compute.manager [-] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1196.031536] env[61978]: DEBUG nova.network.neutron [-] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1196.104263] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f3f2dae-3139-49df-87b0-014249100af5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.133406] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54803447-a45e-47ab-bac3-9144eedecf34 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.141364] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Updating instance 'b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2' progress to 83 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1196.149464] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ce6c2348-b743-433f-8606-5a0c11cd98a3 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Releasing lock "refresh_cache-1eae10e8-58b1-435d-86fc-0674725ce6cd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1196.150397] env[61978]: DEBUG nova.compute.manager [None req-ce6c2348-b743-433f-8606-5a0c11cd98a3 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Inject network info {{(pid=61978) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7219}} [ 1196.152446] env[61978]: DEBUG nova.compute.manager [None req-ce6c2348-b743-433f-8606-5a0c11cd98a3 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] network_info to inject: |[{"id": "63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8", "address": "fa:16:3e:79:9a:37", "network": {"id": "74e44162-dbb7-4a19-b344-6133915c4ab5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-893790941-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a1c323dddcd42809d565f46ecf5e18f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63ba2eb7-45", "ovs_interfaceid": "63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1196.155663] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6c2348-b743-433f-8606-5a0c11cd98a3 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Reconfiguring VM instance to set the machine id {{(pid=61978) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1196.156041] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6ca3e6f-687e-471e-8eb2-632dbe6d4fcc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.172805] env[61978]: DEBUG oslo_vmware.api [None req-ce6c2348-b743-433f-8606-5a0c11cd98a3 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Waiting for the task: (returnval){ [ 1196.172805] env[61978]: value = "task-1395645" [ 1196.172805] env[61978]: _type = "Task" [ 1196.172805] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.192770] env[61978]: DEBUG oslo_vmware.api [None req-ce6c2348-b743-433f-8606-5a0c11cd98a3 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395645, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.193308] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395641, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.199595] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395642, 'name': PowerOffVM_Task, 'duration_secs': 0.205756} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.199911] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1196.200204] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1196.201174] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2dab934-fac0-4550-8cec-fde36b73b849 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.209876] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1196.211538] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-457164a2-5da5-406d-9e1d-61f6bf0e0154 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.258829] env[61978]: DEBUG oslo_vmware.api [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': task-1395643, 'name': PowerOffVM_Task, 'duration_secs': 0.208053} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.259088] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1196.259470] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1196.259832] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23fae2c0-61e0-4dee-9c6e-4fd2784c4db0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.267399] env[61978]: DEBUG nova.objects.instance [None req-363de708-1ad0-459f-be4b-4518607a8576 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Lazy-loading 'flavor' on Instance uuid 1eae10e8-58b1-435d-86fc-0674725ce6cd {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1196.284852] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1196.285170] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1196.285445] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Deleting the datastore file [datastore2] 68791dff-12e0-499d-8835-1e9173af570f {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1196.286888] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3831e22b-e109-4643-9682-c65e3ef049af {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.294442] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1196.294442] env[61978]: value = "task-1395648" [ 1196.294442] env[61978]: _type = "Task" [ 1196.294442] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.302671] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395648, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.325922] env[61978]: DEBUG oslo_vmware.api [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395644, 'name': Destroy_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.329657] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1196.329943] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1196.330186] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Deleting the datastore file [datastore1] 7d388d5c-2120-4dc5-a04f-5394e1e6f852 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1196.330461] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48a4fb96-1050-43cb-9951-04882fd4ea0e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.338711] env[61978]: DEBUG oslo_vmware.api [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Waiting for the task: (returnval){ [ 1196.338711] env[61978]: value = "task-1395649" [ 1196.338711] env[61978]: _type = "Task" [ 1196.338711] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.348408] env[61978]: DEBUG oslo_vmware.api [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': task-1395649, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.390205] env[61978]: DEBUG nova.scheduler.client.report [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1196.648360] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1196.648701] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85866f1b-7c0f-4fa7-88d6-2ea5cd6d2a00 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.656286] env[61978]: DEBUG oslo_vmware.api [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1196.656286] env[61978]: value = "task-1395651" [ 1196.656286] env[61978]: _type = "Task" [ 1196.656286] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.664713] env[61978]: DEBUG oslo_vmware.api [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395651, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.686724] env[61978]: DEBUG oslo_vmware.api [None req-ce6c2348-b743-433f-8606-5a0c11cd98a3 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395645, 'name': ReconfigVM_Task, 'duration_secs': 0.235367} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.690194] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6c2348-b743-433f-8606-5a0c11cd98a3 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Reconfigured VM instance to set the machine id {{(pid=61978) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1196.694581] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395641, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.636958} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.694992] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 97e128f9-7135-46b0-b22a-ee5449ba48b6/97e128f9-7135-46b0-b22a-ee5449ba48b6.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1196.695347] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1196.695730] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc20aed1-aee5-4fa3-82c1-0f7596891c67 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.714271] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1196.714271] env[61978]: value = "task-1395652" [ 1196.714271] env[61978]: _type = "Task" [ 1196.714271] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.724418] env[61978]: DEBUG nova.network.neutron [req-6d52748f-1945-4129-bf87-13b2a32a165d req-91923bd9-5e33-4564-a4fe-5949f01c5885 service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Updated VIF entry in instance network info cache for port bba3eeec-259f-4ea3-b0f6-e509a29d33f4. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1196.725311] env[61978]: DEBUG nova.network.neutron [req-6d52748f-1945-4129-bf87-13b2a32a165d req-91923bd9-5e33-4564-a4fe-5949f01c5885 service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Updating instance_info_cache with network_info: [{"id": "bba3eeec-259f-4ea3-b0f6-e509a29d33f4", "address": "fa:16:3e:1e:db:c1", "network": {"id": "aa63ec3f-fde3-49f2-ab12-71ae85601428", "bridge": null, "label": "tempest-ServersNegativeTestJSON-445619089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "198eab494c0a4e0eb83bae5732df9c78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapbba3eeec-25", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.731024] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395652, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.778157] env[61978]: DEBUG oslo_concurrency.lockutils [None req-363de708-1ad0-459f-be4b-4518607a8576 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Acquiring lock "refresh_cache-1eae10e8-58b1-435d-86fc-0674725ce6cd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1196.778612] env[61978]: DEBUG oslo_concurrency.lockutils [None req-363de708-1ad0-459f-be4b-4518607a8576 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Acquired lock "refresh_cache-1eae10e8-58b1-435d-86fc-0674725ce6cd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.803847] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395648, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.477672} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.804441] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1196.804441] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1196.804796] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1196.824481] env[61978]: DEBUG oslo_vmware.api [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395644, 'name': Destroy_Task, 'duration_secs': 0.529563} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.825451] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Destroyed the VM [ 1196.826297] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Deleting Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1196.826663] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ef25b7ab-be46-42fe-92ab-5cdcfb068b53 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.835135] env[61978]: DEBUG oslo_vmware.api [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1196.835135] env[61978]: value = "task-1395653" [ 1196.835135] env[61978]: _type = "Task" [ 1196.835135] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.847167] env[61978]: DEBUG oslo_vmware.api [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395653, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.850823] env[61978]: DEBUG oslo_vmware.api [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Task: {'id': task-1395649, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.499839} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.851204] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1196.851411] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1196.851643] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1196.851873] env[61978]: INFO nova.compute.manager [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1196.852202] env[61978]: DEBUG oslo.service.loopingcall [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1196.852457] env[61978]: DEBUG nova.compute.manager [-] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1196.852636] env[61978]: DEBUG nova.network.neutron [-] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1196.895190] env[61978]: DEBUG oslo_concurrency.lockutils [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.867s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1196.897952] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.267s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.898069] env[61978]: DEBUG nova.objects.instance [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lazy-loading 'resources' on Instance uuid 17c56c1c-9992-4559-ad23-c68909ae6792 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1196.922910] env[61978]: INFO nova.scheduler.client.report [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Deleted allocations for instance 9ee04ee8-98ec-4be9-935d-cad7cd176466 [ 1197.166735] env[61978]: DEBUG oslo_vmware.api [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395651, 'name': PowerOnVM_Task, 'duration_secs': 0.417646} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.167037] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1197.167252] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-5a7a1b75-2def-4708-b6c7-315964731bc7 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Updating instance 'b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2' progress to 100 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1197.228020] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395652, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072398} completed successfully. 
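The "compute_resources" acquired/released entries above, with their "waited"/"held" timings, come from oslo.concurrency's named-lock wrapper. A minimal sketch of the decorator form follows; `update_usage` here is a placeholder, not Nova's resource tracker.

```python
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage(resources, instance):
    # Runs only while the named semaphore is held; the "waited N s" and
    # "held N s" figures in the log bracket exactly this critical section.
    ...
```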
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.228020] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1197.228020] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61336347-7b21-45ee-aa7f-86edcc08bdf4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.231648] env[61978]: DEBUG oslo_concurrency.lockutils [req-6d52748f-1945-4129-bf87-13b2a32a165d req-91923bd9-5e33-4564-a4fe-5949f01c5885 service nova] Releasing lock "refresh_cache-17c56c1c-9992-4559-ad23-c68909ae6792" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1197.231648] env[61978]: DEBUG nova.compute.manager [req-6d52748f-1945-4129-bf87-13b2a32a165d req-91923bd9-5e33-4564-a4fe-5949f01c5885 service nova] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Received event network-vif-deleted-fdf95a42-1379-4895-9a94-f8a8cf1d070d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1197.231867] env[61978]: DEBUG nova.compute.manager [req-6d52748f-1945-4129-bf87-13b2a32a165d req-91923bd9-5e33-4564-a4fe-5949f01c5885 service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Received event network-changed-63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1197.231934] env[61978]: DEBUG nova.compute.manager [req-6d52748f-1945-4129-bf87-13b2a32a165d req-91923bd9-5e33-4564-a4fe-5949f01c5885 service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Refreshing instance network info cache due to event network-changed-63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1197.232853] env[61978]: DEBUG oslo_concurrency.lockutils [req-6d52748f-1945-4129-bf87-13b2a32a165d req-91923bd9-5e33-4564-a4fe-5949f01c5885 service nova] Acquiring lock "refresh_cache-1eae10e8-58b1-435d-86fc-0674725ce6cd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1197.256296] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 97e128f9-7135-46b0-b22a-ee5449ba48b6/97e128f9-7135-46b0-b22a-ee5449ba48b6.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1197.257443] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02a3b944-61af-41e3-aaa6-8dfb6aac225d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.287020] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1197.287020] env[61978]: value = "task-1395654" [ 1197.287020] env[61978]: _type = "Task" [ 1197.287020] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.293486] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395654, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.354466] env[61978]: DEBUG oslo_vmware.api [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395653, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.401737] env[61978]: DEBUG nova.objects.instance [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lazy-loading 'numa_topology' on Instance uuid 17c56c1c-9992-4559-ad23-c68909ae6792 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1197.438072] env[61978]: DEBUG oslo_concurrency.lockutils [None req-735bdc12-3171-4970-a796-914a6bf99403 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "9ee04ee8-98ec-4be9-935d-cad7cd176466" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.072s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1197.620349] env[61978]: DEBUG nova.network.neutron [None req-363de708-1ad0-459f-be4b-4518607a8576 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1197.680672] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "17c56c1c-9992-4559-ad23-c68909ae6792" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1197.752139] env[61978]: DEBUG nova.compute.manager [req-c86c288f-d6b7-4f9e-91cc-c896c3263230 req-21671223-46f9-4ba9-a958-bdda0bf2ea48 service nova] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Received event network-vif-deleted-28538b34-2ffa-4e6e-a451-0654e6ec063d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1197.752388] env[61978]: INFO nova.compute.manager [req-c86c288f-d6b7-4f9e-91cc-c896c3263230 req-21671223-46f9-4ba9-a958-bdda0bf2ea48 service nova] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Neutron deleted interface 28538b34-2ffa-4e6e-a451-0654e6ec063d; detaching it from the instance and deleting it from the info cache [ 1197.752684] env[61978]: DEBUG nova.network.neutron [req-c86c288f-d6b7-4f9e-91cc-c896c3263230 req-21671223-46f9-4ba9-a958-bdda0bf2ea48 service nova] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.798437] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395654, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.849238] env[61978]: DEBUG nova.virt.hardware [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1197.849543] env[61978]: DEBUG nova.virt.hardware [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1197.849757] env[61978]: DEBUG nova.virt.hardware [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1197.849987] env[61978]: DEBUG nova.virt.hardware [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1197.850211] env[61978]: DEBUG nova.virt.hardware [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1197.850408] env[61978]: DEBUG nova.virt.hardware [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1197.850656] env[61978]: DEBUG nova.virt.hardware [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1197.850855] env[61978]: DEBUG nova.virt.hardware [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1197.851273] 
env[61978]: DEBUG nova.virt.hardware [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1197.851495] env[61978]: DEBUG nova.virt.hardware [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1197.851713] env[61978]: DEBUG nova.virt.hardware [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1197.853784] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-199bb5aa-89d0-4736-aebb-cbdc908b1f62 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.866353] env[61978]: DEBUG oslo_vmware.api [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395653, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.870445] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e95ca4d-50e0-4ee5-affb-6a1b2bd87030 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.887056] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:d0:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6def6dc5-d564-45ca-8f4f-7c820677e6e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '73fe675b-ef9e-44db-a9d2-13d68f04aacb', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1197.895182] env[61978]: DEBUG oslo.service.loopingcall [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
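The hardware entries above show the flavor/image limits defaulting to 65536 sockets/cores/threads and a single possible topology, 1:1:1, for a 1-vCPU flavor. The sketch below is a simplified enumeration of candidate splits under those maxima, not Nova's actual `_get_possible_cpu_topologies` implementation, but it reproduces the logged result for 1 vCPU.

```python
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield (sockets, cores, threads) whose product equals vcpus."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    yield (sockets, cores, threads)


print(list(possible_topologies(1)))  # [(1, 1, 1)] -- matches the log
```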
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1197.896204] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1197.896700] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d351189-bb25-48b3-84cc-ede636e6f1e9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.913013] env[61978]: DEBUG nova.objects.base [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Object Instance<17c56c1c-9992-4559-ad23-c68909ae6792> lazy-loaded attributes: resources,numa_topology {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1197.921170] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1197.921170] env[61978]: value = "task-1395655" [ 1197.921170] env[61978]: _type = "Task" [ 1197.921170] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.929351] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395655, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.014846] env[61978]: DEBUG nova.compute.manager [req-447d3a44-dc85-4f30-adb5-58816fa6635f req-763a3dfe-bc16-40ae-8036-72488b1b56c9 service nova] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Received event network-vif-deleted-50f09356-baf0-487b-a1f9-4cdc359c1daf {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1198.015476] env[61978]: INFO nova.compute.manager [req-447d3a44-dc85-4f30-adb5-58816fa6635f req-763a3dfe-bc16-40ae-8036-72488b1b56c9 service nova] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Neutron deleted interface 50f09356-baf0-487b-a1f9-4cdc359c1daf; detaching it from the instance and deleting it from the info cache [ 1198.015695] env[61978]: DEBUG nova.network.neutron [req-447d3a44-dc85-4f30-adb5-58816fa6635f req-763a3dfe-bc16-40ae-8036-72488b1b56c9 service nova] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.080160] env[61978]: DEBUG nova.network.neutron [-] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.238524] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f7dd8c-3dfd-4041-a27f-4f20ef6722b3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.249407] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd01094-6285-41a6-9192-19043e131d84 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.289466] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3bebd3c3-ce49-49c1-b9c4-4cab82bd182f {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.295196] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ecc5965-a132-4930-ba13-6693b1621ebd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.303864] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395654, 'name': ReconfigVM_Task, 'duration_secs': 0.561226} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.304510] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 97e128f9-7135-46b0-b22a-ee5449ba48b6/97e128f9-7135-46b0-b22a-ee5449ba48b6.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1198.309598] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-263e207e-b6e1-4ecd-9da3-91fd99177b07 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.315041] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c79fa83-e374-4ece-9c9b-bde44dbed79e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.319586] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d23bd4f-ceb7-4f6c-8030-b852c724b652 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.332153] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1198.332153] env[61978]: value = "task-1395656" [ 1198.332153] env[61978]: _type = "Task" [ 1198.332153] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.343616] env[61978]: DEBUG nova.compute.provider_tree [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1198.362901] env[61978]: DEBUG nova.compute.manager [req-c86c288f-d6b7-4f9e-91cc-c896c3263230 req-21671223-46f9-4ba9-a958-bdda0bf2ea48 service nova] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Detach interface failed, port_id=28538b34-2ffa-4e6e-a451-0654e6ec063d, reason: Instance ae6b92bb-6f79-4b52-bdb7-095985bf2fad could not be found. 
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1198.366758] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395656, 'name': Rename_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.371701] env[61978]: DEBUG oslo_vmware.api [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395653, 'name': RemoveSnapshot_Task, 'duration_secs': 1.060203} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.372531] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Deleted Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1198.433160] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395655, 'name': CreateVM_Task, 'duration_secs': 0.384807} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.433160] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1198.433706] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1198.433826] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1198.434231] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1198.434485] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0c6f506-2d2e-44e1-86d7-0f6a317bcd7d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.439438] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1198.439438] env[61978]: value = 
"session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b627aa-21d7-b58b-2fe0-c09550ae3813" [ 1198.439438] env[61978]: _type = "Task" [ 1198.439438] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.448216] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b627aa-21d7-b58b-2fe0-c09550ae3813, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.470839] env[61978]: DEBUG nova.network.neutron [-] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.522627] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7c5e4708-d913-411d-a994-7fc8aa7683dd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.545285] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ccaa92-8894-4497-9322-ec0cdd7a0433 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.583122] env[61978]: DEBUG nova.compute.manager [req-447d3a44-dc85-4f30-adb5-58816fa6635f req-763a3dfe-bc16-40ae-8036-72488b1b56c9 service nova] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Detach interface failed, port_id=50f09356-baf0-487b-a1f9-4cdc359c1daf, reason: Instance 7d388d5c-2120-4dc5-a04f-5394e1e6f852 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1198.585777] env[61978]: INFO nova.compute.manager [-] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Took 2.55 seconds to deallocate network for instance. [ 1198.837644] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-67f5093c-dd19-48c6-ab50-f2a104e55f65 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Volume attach. 
Driver type: vmdk {{(pid=61978) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1198.838107] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-67f5093c-dd19-48c6-ab50-f2a104e55f65 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295999', 'volume_id': '6f97516a-a581-42c8-8158-14d54c5b9874', 'name': 'volume-6f97516a-a581-42c8-8158-14d54c5b9874', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6', 'attached_at': '', 'detached_at': '', 'volume_id': '6f97516a-a581-42c8-8158-14d54c5b9874', 'serial': '6f97516a-a581-42c8-8158-14d54c5b9874'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1198.839043] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c49ab9b9-cd69-4e41-872a-f1b9a47ff9b2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.848473] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395656, 'name': Rename_Task, 'duration_secs': 0.203144} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.865430] env[61978]: DEBUG nova.scheduler.client.report [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1198.869192] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1198.870226] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-26b3a2d4-b07f-4ddb-83c8-f70495a34e67 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.872600] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1187bb-d29c-40d3-8682-785052bb5b53 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.876758] env[61978]: WARNING nova.compute.manager [None req-3474e61b-3a3f-4b47-a135-694029bbaf2c tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Image not found 
during snapshot: nova.exception.ImageNotFound: Image 244c307e-67dc-4161-9e24-d650ad818691 could not be found. [ 1198.901298] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1198.901298] env[61978]: value = "task-1395658" [ 1198.901298] env[61978]: _type = "Task" [ 1198.901298] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.910543] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-67f5093c-dd19-48c6-ab50-f2a104e55f65 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] volume-6f97516a-a581-42c8-8158-14d54c5b9874/volume-6f97516a-a581-42c8-8158-14d54c5b9874.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1198.913893] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9f1d293-5411-419b-8296-2e875af2c912 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.939557] env[61978]: DEBUG oslo_vmware.api [None req-67f5093c-dd19-48c6-ab50-f2a104e55f65 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1198.939557] env[61978]: value = "task-1395659" [ 1198.939557] env[61978]: _type = "Task" [ 1198.939557] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.939778] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395658, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.951933] env[61978]: DEBUG oslo_vmware.api [None req-67f5093c-dd19-48c6-ab50-f2a104e55f65 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395659, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.955578] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b627aa-21d7-b58b-2fe0-c09550ae3813, 'name': SearchDatastore_Task, 'duration_secs': 0.009784} completed successfully. 
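The inventory payload reported to placement a few entries above (provider 44209228-3464-48ae-bc40-83eccd44b0cf) determines usable capacity per resource class as (total - reserved) * allocation_ratio. A small worked example with the logged figures:

```python
# Figures copied from the inventory data in the log above.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, usable)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```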
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.955880] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1198.956147] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1198.956431] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1198.956602] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1198.956771] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1198.957070] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e84f6c16-1ea1-45e2-b49d-c32596d78e69 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.965711] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1198.965944] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1198.966790] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab2f02d8-b559-4e24-abdd-bf81e8c9db22 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.971969] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1198.971969] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52691bdb-a061-ebd6-9c50-9beda0340424" [ 1198.971969] env[61978]: _type = "Task" [ 1198.971969] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.979102] env[61978]: INFO nova.compute.manager [-] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Took 2.12 seconds to deallocate network for instance. [ 1198.985478] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52691bdb-a061-ebd6-9c50-9beda0340424, 'name': SearchDatastore_Task, 'duration_secs': 0.010119} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.989033] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7f0f8c0-dabd-43ce-a202-c3bf6d33c020 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.991631] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1198.991631] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]523d00c7-e2cd-9729-9d77-fffaf12be6a5" [ 1198.991631] env[61978]: _type = "Task" [ 1198.991631] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.000176] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523d00c7-e2cd-9729-9d77-fffaf12be6a5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.068961] env[61978]: DEBUG nova.network.neutron [None req-363de708-1ad0-459f-be4b-4518607a8576 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Updating instance_info_cache with network_info: [{"id": "63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8", "address": "fa:16:3e:79:9a:37", "network": {"id": "74e44162-dbb7-4a19-b344-6133915c4ab5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-893790941-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a1c323dddcd42809d565f46ecf5e18f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63ba2eb7-45", "ovs_interfaceid": "63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.091891] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.373488] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.473s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.378432] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.284s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.378899] env[61978]: DEBUG nova.objects.instance [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lazy-loading 'resources' on Instance uuid ae6b92bb-6f79-4b52-bdb7-095985bf2fad {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1199.426138] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 
tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395658, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.449891] env[61978]: DEBUG oslo_vmware.api [None req-67f5093c-dd19-48c6-ab50-f2a104e55f65 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395659, 'name': ReconfigVM_Task, 'duration_secs': 0.499924} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.450245] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-67f5093c-dd19-48c6-ab50-f2a104e55f65 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Reconfigured VM instance instance-0000004b to attach disk [datastore2] volume-6f97516a-a581-42c8-8158-14d54c5b9874/volume-6f97516a-a581-42c8-8158-14d54c5b9874.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1199.456373] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6bd31c46-ef02-4e85-b3f5-258cab24cfe5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.474878] env[61978]: DEBUG oslo_vmware.api [None req-67f5093c-dd19-48c6-ab50-f2a104e55f65 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1199.474878] env[61978]: value = "task-1395660" [ 1199.474878] env[61978]: _type = "Task" [ 1199.474878] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.483803] env[61978]: DEBUG oslo_vmware.api [None req-67f5093c-dd19-48c6-ab50-f2a104e55f65 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395660, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.486940] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.502276] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523d00c7-e2cd-9729-9d77-fffaf12be6a5, 'name': SearchDatastore_Task, 'duration_secs': 0.01572} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.502912] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1199.503218] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 68791dff-12e0-499d-8835-1e9173af570f/68791dff-12e0-499d-8835-1e9173af570f.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1199.503501] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e08307c-49b0-42ac-8d41-c554c04c079a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.512017] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1199.512017] env[61978]: value = "task-1395661" [ 1199.512017] env[61978]: _type = "Task" [ 1199.512017] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.519987] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395661, 'name': CopyVirtualDisk_Task} progress is 0%. 
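The entries above trace the cache-then-copy pattern for the root disk: serialize on the cached image path, probe the datastore for the cached VMDK (SearchDatastore_Task), then copy it into the instance directory (CopyVirtualDisk_Task). A rough sketch of that flow follows; all helper callables are hypothetical stand-ins supplied by the caller, not Nova's real API.

```python
from oslo_concurrency import lockutils


def provision_root_disk(session, image_id, instance_uuid,
                        search, fetch, copy, datastore='datastore2'):
    """search/fetch/copy: caller-supplied callables wrapping the
    corresponding vCenter tasks (hypothetical, for illustration only)."""
    cached = (f'[{datastore}] devstack-image-cache_base/'
              f'{image_id}/{image_id}.vmdk')
    target = f'[{datastore}] {instance_uuid}/{instance_uuid}.vmdk'
    # Serialize on the cached image path, as in the lock lines logged above.
    with lockutils.lock(cached):
        if not search(session, cached):        # SearchDatastore_Task
            fetch(session, image_id, cached)   # populate the cache
    copy(session, cached, target)              # CopyVirtualDisk_Task
    return target
```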
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.572036] env[61978]: DEBUG oslo_concurrency.lockutils [None req-363de708-1ad0-459f-be4b-4518607a8576 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Releasing lock "refresh_cache-1eae10e8-58b1-435d-86fc-0674725ce6cd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1199.572328] env[61978]: DEBUG nova.compute.manager [None req-363de708-1ad0-459f-be4b-4518607a8576 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Inject network info {{(pid=61978) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7219}} [ 1199.572602] env[61978]: DEBUG nova.compute.manager [None req-363de708-1ad0-459f-be4b-4518607a8576 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] network_info to inject: |[{"id": "63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8", "address": "fa:16:3e:79:9a:37", "network": {"id": "74e44162-dbb7-4a19-b344-6133915c4ab5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-893790941-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a1c323dddcd42809d565f46ecf5e18f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63ba2eb7-45", "ovs_interfaceid": "63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1199.578513] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-363de708-1ad0-459f-be4b-4518607a8576 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Reconfiguring VM instance to set the machine id {{(pid=61978) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1199.578719] env[61978]: DEBUG oslo_concurrency.lockutils [req-6d52748f-1945-4129-bf87-13b2a32a165d req-91923bd9-5e33-4564-a4fe-5949f01c5885 service nova] Acquired lock "refresh_cache-1eae10e8-58b1-435d-86fc-0674725ce6cd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.578919] env[61978]: DEBUG nova.network.neutron [req-6d52748f-1945-4129-bf87-13b2a32a165d req-91923bd9-5e33-4564-a4fe-5949f01c5885 service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Refreshing network info cache for port 63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8 {{(pid=61978) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2064}} [ 1199.580457] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be31937c-8083-44fd-832b-bdb2b27c6f5e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.602567] env[61978]: DEBUG oslo_vmware.api [None req-363de708-1ad0-459f-be4b-4518607a8576 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Waiting for the task: (returnval){ [ 1199.602567] env[61978]: value = "task-1395662" [ 1199.602567] env[61978]: _type = "Task" [ 1199.602567] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.614022] env[61978]: DEBUG oslo_vmware.api [None req-363de708-1ad0-459f-be4b-4518607a8576 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395662, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.674714] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.675076] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.675318] env[61978]: DEBUG nova.compute.manager [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Going to confirm migration 4 {{(pid=61978) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1199.893563] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0eec114-65d9-4d6e-a42d-5db64d6cadf8 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "17c56c1c-9992-4559-ad23-c68909ae6792" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 25.820s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.893563] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "17c56c1c-9992-4559-ad23-c68909ae6792" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 2.213s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.893991] env[61978]: INFO nova.compute.manager [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] 
[instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Unshelving [ 1199.901973] env[61978]: DEBUG nova.network.neutron [req-6d52748f-1945-4129-bf87-13b2a32a165d req-91923bd9-5e33-4564-a4fe-5949f01c5885 service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Updated VIF entry in instance network info cache for port 63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1199.903117] env[61978]: DEBUG nova.network.neutron [req-6d52748f-1945-4129-bf87-13b2a32a165d req-91923bd9-5e33-4564-a4fe-5949f01c5885 service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Updating instance_info_cache with network_info: [{"id": "63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8", "address": "fa:16:3e:79:9a:37", "network": {"id": "74e44162-dbb7-4a19-b344-6133915c4ab5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-893790941-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a1c323dddcd42809d565f46ecf5e18f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63ba2eb7-45", "ovs_interfaceid": "63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.933731] env[61978]: DEBUG oslo_vmware.api [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395658, 'name': PowerOnVM_Task, 'duration_secs': 0.544048} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.934077] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1199.934318] env[61978]: INFO nova.compute.manager [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Took 9.79 seconds to spawn the instance on the hypervisor. 
[ 1199.934513] env[61978]: DEBUG nova.compute.manager [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1199.935457] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb956d76-9e2d-4b06-8edd-ff748a850afb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.988796] env[61978]: DEBUG oslo_vmware.api [None req-67f5093c-dd19-48c6-ab50-f2a104e55f65 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395660, 'name': ReconfigVM_Task, 'duration_secs': 0.216275} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.991836] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-67f5093c-dd19-48c6-ab50-f2a104e55f65 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295999', 'volume_id': '6f97516a-a581-42c8-8158-14d54c5b9874', 'name': 'volume-6f97516a-a581-42c8-8158-14d54c5b9874', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6', 'attached_at': '', 'detached_at': '', 'volume_id': '6f97516a-a581-42c8-8158-14d54c5b9874', 'serial': '6f97516a-a581-42c8-8158-14d54c5b9874'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1200.025534] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395661, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.116022] env[61978]: DEBUG oslo_vmware.api [None req-363de708-1ad0-459f-be4b-4518607a8576 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395662, 'name': ReconfigVM_Task, 'duration_secs': 0.156616} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.116022] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-363de708-1ad0-459f-be4b-4518607a8576 tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Reconfigured VM instance to set the machine id {{(pid=61978) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1200.188104] env[61978]: DEBUG nova.compute.manager [req-7aa2d8db-3525-4f3e-82af-a2bd5ce8ddba req-5eac6700-09cf-4031-9db8-fcf8286cda95 service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Received event network-changed-63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1200.188315] env[61978]: DEBUG nova.compute.manager [req-7aa2d8db-3525-4f3e-82af-a2bd5ce8ddba req-5eac6700-09cf-4031-9db8-fcf8286cda95 service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Refreshing instance network info cache due to event network-changed-63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1200.188520] env[61978]: DEBUG oslo_concurrency.lockutils [req-7aa2d8db-3525-4f3e-82af-a2bd5ce8ddba req-5eac6700-09cf-4031-9db8-fcf8286cda95 service nova] Acquiring lock "refresh_cache-1eae10e8-58b1-435d-86fc-0674725ce6cd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1200.220131] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde132bc-d06b-46b8-8705-f3865177e7b8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.227707] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f9c128-0305-4295-b1db-557e4985ef98 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.258396] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "refresh_cache-b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1200.258688] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquired lock "refresh_cache-b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.258949] env[61978]: DEBUG nova.network.neutron [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1200.259217] env[61978]: DEBUG nova.objects.instance [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lazy-loading 'info_cache' on Instance uuid b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2 {{(pid=61978) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1200.261024] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ef2807-0865-461b-b69c-96b38d77adb1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.270552] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8153a8b-8480-4549-83e2-924c0b373862 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.287290] env[61978]: DEBUG nova.compute.provider_tree [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1200.405902] env[61978]: DEBUG oslo_concurrency.lockutils [req-6d52748f-1945-4129-bf87-13b2a32a165d req-91923bd9-5e33-4564-a4fe-5949f01c5885 service nova] Releasing lock "refresh_cache-1eae10e8-58b1-435d-86fc-0674725ce6cd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1200.406295] env[61978]: DEBUG oslo_concurrency.lockutils [req-7aa2d8db-3525-4f3e-82af-a2bd5ce8ddba req-5eac6700-09cf-4031-9db8-fcf8286cda95 service nova] Acquired lock "refresh_cache-1eae10e8-58b1-435d-86fc-0674725ce6cd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.406504] env[61978]: DEBUG nova.network.neutron [req-7aa2d8db-3525-4f3e-82af-a2bd5ce8ddba req-5eac6700-09cf-4031-9db8-fcf8286cda95 service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Refreshing network info cache for port 63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1200.467593] env[61978]: INFO nova.compute.manager [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Took 18.33 seconds to build instance. 
[ 1200.486414] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "90a38dba-0dae-455a-8d02-44c2bb098fb5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.486705] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "90a38dba-0dae-455a-8d02-44c2bb098fb5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.520292] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "b76dd94e-c14b-48d4-bb7f-020313412ca2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.520546] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "b76dd94e-c14b-48d4-bb7f-020313412ca2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.532212] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395661, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.630401} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.532212] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 68791dff-12e0-499d-8835-1e9173af570f/68791dff-12e0-499d-8835-1e9173af570f.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1200.532212] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1200.532212] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-699d65f6-47e0-4f16-b3b3-43000aefcb3f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.544810] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1200.544810] env[61978]: value = "task-1395663" [ 1200.544810] env[61978]: _type = "Task" [ 1200.544810] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.555813] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395663, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.561219] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "e9b70b36-d0d8-430e-a5e7-588d3c75d7ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.561497] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "e9b70b36-d0d8-430e-a5e7-588d3c75d7ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.793290] env[61978]: DEBUG nova.scheduler.client.report [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1200.919245] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.929350] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "c0be687a-7444-4019-8b12-dac41a7c080e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.929507] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "c0be687a-7444-4019-8b12-dac41a7c080e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.929710] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "c0be687a-7444-4019-8b12-dac41a7c080e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
{{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.929897] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "c0be687a-7444-4019-8b12-dac41a7c080e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.930081] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "c0be687a-7444-4019-8b12-dac41a7c080e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.932212] env[61978]: INFO nova.compute.manager [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Terminating instance [ 1200.934016] env[61978]: DEBUG nova.compute.manager [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1200.934303] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1200.935195] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3131cd2-c2a5-4747-b678-88d40ef8e71b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.945249] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1200.945531] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-477813a8-34b3-46bc-b222-e534fc778ed7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.950876] env[61978]: DEBUG oslo_vmware.api [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1200.950876] env[61978]: value = "task-1395664" [ 1200.950876] env[61978]: _type = "Task" [ 1200.950876] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.959515] env[61978]: DEBUG oslo_vmware.api [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395664, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.967254] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e03baed8-4496-4a6f-ab70-61f475822240 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "97e128f9-7135-46b0-b22a-ee5449ba48b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.846s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.990018] env[61978]: DEBUG nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1201.022911] env[61978]: DEBUG nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1201.046884] env[61978]: DEBUG nova.objects.instance [None req-67f5093c-dd19-48c6-ab50-f2a104e55f65 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lazy-loading 'flavor' on Instance uuid a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1201.063271] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395663, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073656} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.063271] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1201.064272] env[61978]: DEBUG nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1201.068458] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d7684a2-8888-47cb-9294-c5abeca672d1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.095541] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] 68791dff-12e0-499d-8835-1e9173af570f/68791dff-12e0-499d-8835-1e9173af570f.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1201.100835] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5405b73-904e-4e12-b900-64e9855030ec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.124816] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1201.124816] env[61978]: value = "task-1395665" [ 1201.124816] env[61978]: _type = "Task" [ 1201.124816] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.137200] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395665, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.179860] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Acquiring lock "1eae10e8-58b1-435d-86fc-0674725ce6cd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.180331] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Lock "1eae10e8-58b1-435d-86fc-0674725ce6cd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.180960] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Acquiring lock "1eae10e8-58b1-435d-86fc-0674725ce6cd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.181652] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Lock "1eae10e8-58b1-435d-86fc-0674725ce6cd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.182051] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Lock "1eae10e8-58b1-435d-86fc-0674725ce6cd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.185700] env[61978]: INFO nova.compute.manager [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Terminating instance [ 1201.188827] env[61978]: DEBUG nova.compute.manager [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1201.189325] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1201.191098] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5519d5-604d-48b7-9446-56ccb314ba15 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.204587] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1201.205067] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2d2126a1-5eac-4b39-9e19-e866c6dbf593 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.211821] env[61978]: DEBUG oslo_vmware.api [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Waiting for the task: (returnval){ [ 1201.211821] env[61978]: value = "task-1395666" [ 1201.211821] env[61978]: _type = "Task" [ 1201.211821] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.224537] env[61978]: DEBUG oslo_vmware.api [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395666, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.274112] env[61978]: DEBUG nova.network.neutron [req-7aa2d8db-3525-4f3e-82af-a2bd5ce8ddba req-5eac6700-09cf-4031-9db8-fcf8286cda95 service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Updated VIF entry in instance network info cache for port 63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1201.274648] env[61978]: DEBUG nova.network.neutron [req-7aa2d8db-3525-4f3e-82af-a2bd5ce8ddba req-5eac6700-09cf-4031-9db8-fcf8286cda95 service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Updating instance_info_cache with network_info: [{"id": "63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8", "address": "fa:16:3e:79:9a:37", "network": {"id": "74e44162-dbb7-4a19-b344-6133915c4ab5", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-893790941-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a1c323dddcd42809d565f46ecf5e18f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63ba2eb7-45", "ovs_interfaceid": "63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.297358] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.922s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.299833] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.813s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.300023] env[61978]: DEBUG nova.objects.instance [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Lazy-loading 'resources' on Instance uuid 7d388d5c-2120-4dc5-a04f-5394e1e6f852 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1201.319886] env[61978]: INFO nova.scheduler.client.report [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Deleted allocations for instance ae6b92bb-6f79-4b52-bdb7-095985bf2fad [ 1201.466936] env[61978]: DEBUG oslo_vmware.api [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395664, 'name': PowerOffVM_Task, 'duration_secs': 0.199372} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.466936] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1201.467645] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1201.467645] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5274bbd5-c1bc-4585-856f-1d6657e6aec3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.512878] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.547056] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.558586] env[61978]: DEBUG oslo_concurrency.lockutils [None req-67f5093c-dd19-48c6-ab50-f2a104e55f65 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.380s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.574389] env[61978]: DEBUG nova.network.neutron [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Updating instance_info_cache with network_info: [{"id": "2efcc135-18f4-45d3-9408-817cdbada770", "address": "fa:16:3e:8c:cc:2c", "network": {"id": "f4e40de3-2e4e-41be-8f2d-4cf29c15dcec", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1125594600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43ebac7c44604f55b94cbc06648f4908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": 
"nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2efcc135-18", "ovs_interfaceid": "2efcc135-18f4-45d3-9408-817cdbada770", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.589744] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.638034] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395665, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.659916] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1201.660144] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1201.660357] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Deleting the datastore file [datastore2] c0be687a-7444-4019-8b12-dac41a7c080e {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1201.660630] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4c4a243-53c8-4650-b7e9-e6b29d0047aa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.667264] env[61978]: DEBUG oslo_vmware.api [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1201.667264] env[61978]: value = "task-1395668" [ 1201.667264] env[61978]: _type = "Task" [ 1201.667264] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.675282] env[61978]: DEBUG oslo_vmware.api [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395668, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.722744] env[61978]: DEBUG oslo_vmware.api [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395666, 'name': PowerOffVM_Task, 'duration_secs': 0.207226} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.723551] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1201.723551] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1201.723551] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-775c502a-7358-44c4-b878-92f406c689eb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.777212] env[61978]: DEBUG oslo_concurrency.lockutils [req-7aa2d8db-3525-4f3e-82af-a2bd5ce8ddba req-5eac6700-09cf-4031-9db8-fcf8286cda95 service nova] Releasing lock "refresh_cache-1eae10e8-58b1-435d-86fc-0674725ce6cd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1201.783587] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1201.783853] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1201.784030] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Deleting the datastore file [datastore1] 1eae10e8-58b1-435d-86fc-0674725ce6cd {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1201.784325] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f9ce18c8-3c54-47d2-8bf0-b35b760d79d2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.790913] env[61978]: DEBUG oslo_vmware.api [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Waiting for the task: (returnval){ [ 1201.790913] 
env[61978]: value = "task-1395670" [ 1201.790913] env[61978]: _type = "Task" [ 1201.790913] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.801274] env[61978]: DEBUG oslo_vmware.api [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395670, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.830486] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a6cf3366-813d-4883-8e0d-c43606b83c81 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "ae6b92bb-6f79-4b52-bdb7-095985bf2fad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.951s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.942700] env[61978]: INFO nova.compute.manager [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Rebuilding instance [ 1201.992687] env[61978]: DEBUG nova.compute.manager [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1201.993701] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b34bc054-80cf-45e7-ab2e-89edac52322e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.071928] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd0e73a-c753-467d-8105-17d6590cbc87 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.077241] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Releasing lock "refresh_cache-b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1202.077532] env[61978]: DEBUG nova.objects.instance [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lazy-loading 'migration_context' on Instance uuid b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1202.081692] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70046539-114b-4a6a-b0e5-2374595a3979 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.114995] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c2f50e-246e-467e-baea-518dba78f75d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.123238] env[61978]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a45cfb32-f3ab-4943-b81a-bd914d2b20ef {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.135610] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395665, 'name': ReconfigVM_Task, 'duration_secs': 0.722029} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.143349] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Reconfigured VM instance instance-00000056 to attach disk [datastore2] 68791dff-12e0-499d-8835-1e9173af570f/68791dff-12e0-499d-8835-1e9173af570f.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1202.144597] env[61978]: DEBUG nova.compute.provider_tree [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1202.148051] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1b710819-3bf3-4025-b3d2-cd21ce70f367 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.157200] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1202.157200] env[61978]: value = "task-1395671" [ 1202.157200] env[61978]: _type = "Task" [ 1202.157200] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.166957] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395671, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.177260] env[61978]: DEBUG oslo_vmware.api [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395668, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15919} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.177540] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1202.177747] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1202.177994] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1202.178270] env[61978]: INFO nova.compute.manager [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1202.178597] env[61978]: DEBUG oslo.service.loopingcall [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1202.178779] env[61978]: DEBUG nova.compute.manager [-] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1202.178779] env[61978]: DEBUG nova.network.neutron [-] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1202.302076] env[61978]: DEBUG oslo_vmware.api [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Task: {'id': task-1395670, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161403} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.302349] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1202.303078] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1202.303078] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1202.303078] env[61978]: INFO nova.compute.manager [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1202.303253] env[61978]: DEBUG oslo.service.loopingcall [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1202.303342] env[61978]: DEBUG nova.compute.manager [-] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1202.303452] env[61978]: DEBUG nova.network.neutron [-] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1202.509436] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1202.509577] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ba69a5a-1320-4b43-b6d5-27589675051b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.518251] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1202.518251] env[61978]: value = "task-1395672" [ 1202.518251] env[61978]: _type = "Task" [ 1202.518251] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.529172] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395672, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.582312] env[61978]: DEBUG nova.objects.base [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1202.583373] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f86085-e2b3-4082-a1e8-c54833868efc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.607297] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90ebca81-82fe-40b9-b9af-05a974b410b7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.613236] env[61978]: DEBUG oslo_vmware.api [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1202.613236] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5246c2df-088b-a56a-7121-c0c2b1e9d96a" [ 1202.613236] env[61978]: _type = "Task" [ 1202.613236] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.621358] env[61978]: DEBUG oslo_vmware.api [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5246c2df-088b-a56a-7121-c0c2b1e9d96a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.652609] env[61978]: DEBUG nova.scheduler.client.report [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1202.656324] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "bdfdd685-e440-4f53-b6c4-2ee2f06acba8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.656611] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "bdfdd685-e440-4f53-b6c4-2ee2f06acba8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.656824] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "bdfdd685-e440-4f53-b6c4-2ee2f06acba8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.657023] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "bdfdd685-e440-4f53-b6c4-2ee2f06acba8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.658607] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "bdfdd685-e440-4f53-b6c4-2ee2f06acba8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.663796] env[61978]: INFO nova.compute.manager [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Terminating instance [ 1202.666326] env[61978]: DEBUG nova.compute.manager 
[None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1202.666649] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1202.667616] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7596008-3d32-44af-9273-88087acf4039 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.674697] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395671, 'name': Rename_Task, 'duration_secs': 0.192633} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.675962] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1202.676284] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e676280-6f6b-4477-bb2f-4168d554d002 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.680501] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1202.681185] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fb0ab983-22b9-4d7c-9b56-86d5817f0590 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.686594] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1202.686594] env[61978]: value = "task-1395673" [ 1202.686594] env[61978]: _type = "Task" [ 1202.686594] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.687339] env[61978]: DEBUG oslo_vmware.api [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1202.687339] env[61978]: value = "task-1395674" [ 1202.687339] env[61978]: _type = "Task" [ 1202.687339] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.698021] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395673, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.702348] env[61978]: DEBUG oslo_vmware.api [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395674, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.717485] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Acquiring lock "27713bbd-1234-44ae-8520-78d85baaae12" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.717485] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Lock "27713bbd-1234-44ae-8520-78d85baaae12" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.035202] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395672, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.125330] env[61978]: DEBUG oslo_vmware.api [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5246c2df-088b-a56a-7121-c0c2b1e9d96a, 'name': SearchDatastore_Task, 'duration_secs': 0.010363} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.125633] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.162184] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.862s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.165449] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.247s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.165687] env[61978]: DEBUG nova.objects.instance [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lazy-loading 'pci_requests' on Instance uuid 17c56c1c-9992-4559-ad23-c68909ae6792 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1203.192585] env[61978]: INFO nova.scheduler.client.report [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Deleted allocations for instance 7d388d5c-2120-4dc5-a04f-5394e1e6f852 [ 1203.206307] env[61978]: DEBUG nova.network.neutron [-] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.211529] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395673, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.214638] env[61978]: DEBUG oslo_vmware.api [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395674, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.220139] env[61978]: DEBUG nova.compute.manager [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1203.248286] env[61978]: DEBUG nova.compute.manager [req-a98f7035-c9f0-4082-8aa0-159ac116769e req-6fc38a01-c1a8-422d-af08-b73668e0a170 service nova] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Received event network-vif-deleted-52cdfaa1-00dd-4eed-94aa-d186999d0614 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1203.374986] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bcabf189-aaf5-4417-8b1f-afbd79d0134e tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.375259] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bcabf189-aaf5-4417-8b1f-afbd79d0134e tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.394846] env[61978]: DEBUG nova.network.neutron [-] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.479091] env[61978]: DEBUG nova.compute.manager [req-d4feb298-1ab9-4e88-ab7b-0dafe9ec6830 req-1eac138e-0832-4af8-8251-92dc0799e427 service nova] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Received event network-vif-deleted-63ba2eb7-451f-4ff9-8a92-cbf21a4cf6f8 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1203.529563] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395672, 'name': PowerOffVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.673417] env[61978]: DEBUG nova.objects.instance [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lazy-loading 'numa_topology' on Instance uuid 17c56c1c-9992-4559-ad23-c68909ae6792 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1203.700538] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395673, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.707934] env[61978]: DEBUG oslo_vmware.api [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395674, 'name': PowerOffVM_Task, 'duration_secs': 0.892806} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.708548] env[61978]: DEBUG oslo_concurrency.lockutils [None req-86d7bccd-93b9-4703-895b-7d6d7ee7227c tempest-ServersTestJSON-1944430372 tempest-ServersTestJSON-1944430372-project-member] Lock "7d388d5c-2120-4dc5-a04f-5394e1e6f852" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.994s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.710104] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1203.710382] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1203.710932] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-66d0b370-7dc0-4e4a-b558-af88dd1edba5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.713031] env[61978]: INFO nova.compute.manager [-] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Took 1.53 seconds to deallocate network for instance. [ 1203.742295] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.787304] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1203.787603] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1203.787804] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Deleting the datastore file [datastore2] bdfdd685-e440-4f53-b6c4-2ee2f06acba8 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1203.788107] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-13d47087-8532-4853-ae25-1fe410fc5a67 {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.793971] env[61978]: DEBUG oslo_vmware.api [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1203.793971] env[61978]: value = "task-1395676" [ 1203.793971] env[61978]: _type = "Task" [ 1203.793971] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.802525] env[61978]: DEBUG oslo_vmware.api [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395676, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.878987] env[61978]: INFO nova.compute.manager [None req-bcabf189-aaf5-4417-8b1f-afbd79d0134e tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Detaching volume 96d26999-1056-47ad-b42f-1ccc31fe5872 [ 1203.897728] env[61978]: INFO nova.compute.manager [-] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Took 1.59 seconds to deallocate network for instance. [ 1203.925069] env[61978]: INFO nova.virt.block_device [None req-bcabf189-aaf5-4417-8b1f-afbd79d0134e tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Attempting to driver detach volume 96d26999-1056-47ad-b42f-1ccc31fe5872 from mountpoint /dev/sdb [ 1203.925373] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcabf189-aaf5-4417-8b1f-afbd79d0134e tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Volume detach. 
Driver type: vmdk {{(pid=61978) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1203.925614] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcabf189-aaf5-4417-8b1f-afbd79d0134e tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295994', 'volume_id': '96d26999-1056-47ad-b42f-1ccc31fe5872', 'name': 'volume-96d26999-1056-47ad-b42f-1ccc31fe5872', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6', 'attached_at': '', 'detached_at': '', 'volume_id': '96d26999-1056-47ad-b42f-1ccc31fe5872', 'serial': '96d26999-1056-47ad-b42f-1ccc31fe5872'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1203.926785] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4702fc88-afe8-46d0-a74c-4d949004daab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.951620] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1147da-1578-43a5-a527-151b1821a3af {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.962042] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d556e57-517a-42be-b213-b8301b81a386 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.987247] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e0b5af0-473f-476a-8a59-8612b960b855 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.003890] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcabf189-aaf5-4417-8b1f-afbd79d0134e tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] The volume has not been displaced from its original location: [datastore1] volume-96d26999-1056-47ad-b42f-1ccc31fe5872/volume-96d26999-1056-47ad-b42f-1ccc31fe5872.vmdk. No consolidation needed. {{(pid=61978) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1204.009395] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcabf189-aaf5-4417-8b1f-afbd79d0134e tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Reconfiguring VM instance instance-0000004b to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1204.009773] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-761ad3c8-d7eb-4c10-b3ff-292942391dc6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.032705] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395672, 'name': PowerOffVM_Task, 'duration_secs': 1.063704} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.034251] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1204.034507] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1204.034854] env[61978]: DEBUG oslo_vmware.api [None req-bcabf189-aaf5-4417-8b1f-afbd79d0134e tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1204.034854] env[61978]: value = "task-1395677" [ 1204.034854] env[61978]: _type = "Task" [ 1204.034854] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.035572] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d6b37c-f4ad-4619-9d37-ea9b18d77068 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.047256] env[61978]: DEBUG oslo_vmware.api [None req-bcabf189-aaf5-4417-8b1f-afbd79d0134e tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395677, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.049429] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1204.049705] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-caefd2e0-23b5-4586-8271-f129d6030b72 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.175745] env[61978]: INFO nova.compute.claims [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1204.206835] env[61978]: DEBUG oslo_vmware.api [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395673, 'name': PowerOnVM_Task, 'duration_secs': 1.220743} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.206983] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1204.207240] env[61978]: DEBUG nova.compute.manager [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1204.208191] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925c533e-e9c0-4d08-a3d5-8068e117066d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.221359] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.305191] env[61978]: DEBUG oslo_vmware.api [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395676, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.415177} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.305474] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1204.305669] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1204.305858] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1204.306052] env[61978]: INFO nova.compute.manager [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Took 1.64 seconds to destroy the instance on the hypervisor. 
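The PowerOnVM_Task, PowerOffVM_Task and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware pattern: the driver invokes a vSphere task method through the shared API session and then polls it (the wait_for_task / _poll_task calls from oslo_vmware/api.py seen throughout this log) until it reports success. A minimal sketch of that invoke-then-wait pattern outside Nova, assuming placeholder vCenter credentials and an already-resolved VM managed object reference (vm_ref) rather than anything taken from this log:

    # Sketch only: illustrates the invoke-then-wait pattern visible in the log above.
    # Host, credentials and vm_ref are placeholders, not values from this deployment.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc.example.org',        # vCenter host (placeholder)
        'administrator',         # username (placeholder)
        'secret',                # password (placeholder)
        api_retry_count=10,      # retries for failed API calls
        task_poll_interval=0.5)  # seconds between task progress polls

    vm_ref = ...                 # VM managed object reference, obtained elsewhere
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)  # blocks, polling until the task succeeds or raises

wait_for_task raises if the task ends in an error state, which is why each task in the log is polled to completion ("completed successfully") before the next step of the destroy sequence (unregister, datastore file delete) begins.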
[ 1204.306395] env[61978]: DEBUG oslo.service.loopingcall [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1204.306593] env[61978]: DEBUG nova.compute.manager [-] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1204.306691] env[61978]: DEBUG nova.network.neutron [-] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1204.405088] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.548787] env[61978]: DEBUG oslo_vmware.api [None req-bcabf189-aaf5-4417-8b1f-afbd79d0134e tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395677, 'name': ReconfigVM_Task, 'duration_secs': 0.338091} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.549191] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcabf189-aaf5-4417-8b1f-afbd79d0134e tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Reconfigured VM instance instance-0000004b to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1204.553791] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbe3e236-1be1-447c-b0b9-ace38ab079c4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.574097] env[61978]: DEBUG oslo_vmware.api [None req-bcabf189-aaf5-4417-8b1f-afbd79d0134e tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1204.574097] env[61978]: value = "task-1395679" [ 1204.574097] env[61978]: _type = "Task" [ 1204.574097] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.586499] env[61978]: DEBUG oslo_vmware.api [None req-bcabf189-aaf5-4417-8b1f-afbd79d0134e tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395679, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.727892] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.084998] env[61978]: DEBUG oslo_vmware.api [None req-bcabf189-aaf5-4417-8b1f-afbd79d0134e tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395679, 'name': ReconfigVM_Task, 'duration_secs': 0.173615} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.085269] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcabf189-aaf5-4417-8b1f-afbd79d0134e tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295994', 'volume_id': '96d26999-1056-47ad-b42f-1ccc31fe5872', 'name': 'volume-96d26999-1056-47ad-b42f-1ccc31fe5872', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6', 'attached_at': '', 'detached_at': '', 'volume_id': '96d26999-1056-47ad-b42f-1ccc31fe5872', 'serial': '96d26999-1056-47ad-b42f-1ccc31fe5872'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1205.118618] env[61978]: DEBUG nova.network.neutron [-] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.343057] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "f33d00ec-72b7-43f2-bc0d-320e3219ae47" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.343057] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "f33d00ec-72b7-43f2-bc0d-320e3219ae47" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.505885] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d18553-0dc5-4185-9fdc-6d05c93c3b3b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.515026] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-386f8d9a-cce2-48c2-9e2d-b95792f091d1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.523350] env[61978]: DEBUG 
nova.compute.manager [req-9381d7a9-fb08-4ae5-ae64-733a4bbcc935 req-7d20d8e4-9afc-4365-a38c-b761908a1743 service nova] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Received event network-vif-deleted-4c3e3550-3780-4cf8-b191-9a82b2f340f2 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1205.556125] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a125624-98ba-41ca-9f39-2da4bb453a1d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.565733] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc219f3-9879-4111-975e-7b5f869cd883 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.581593] env[61978]: DEBUG nova.compute.provider_tree [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1205.621604] env[61978]: INFO nova.compute.manager [-] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Took 1.31 seconds to deallocate network for instance. [ 1205.635806] env[61978]: DEBUG nova.objects.instance [None req-bcabf189-aaf5-4417-8b1f-afbd79d0134e tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lazy-loading 'flavor' on Instance uuid a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1205.845031] env[61978]: DEBUG nova.compute.manager [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1205.875251] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1205.875496] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1205.875652] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Deleting the datastore file [datastore1] 97e128f9-7135-46b0-b22a-ee5449ba48b6 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1205.875936] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71d3bd71-6402-4dc9-ba02-cd54bfff609e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.884013] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1205.884013] env[61978]: value = "task-1395680" [ 1205.884013] env[61978]: _type = "Task" [ 1205.884013] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.893456] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395680, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.085073] env[61978]: DEBUG nova.scheduler.client.report [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1206.129389] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.370662] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.393880] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395680, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147716} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.394163] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1206.394382] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1206.394574] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1206.524573] env[61978]: DEBUG oslo_concurrency.lockutils [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "68791dff-12e0-499d-8835-1e9173af570f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.524866] env[61978]: DEBUG oslo_concurrency.lockutils [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "68791dff-12e0-499d-8835-1e9173af570f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.525112] env[61978]: DEBUG oslo_concurrency.lockutils [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "68791dff-12e0-499d-8835-1e9173af570f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.525317] env[61978]: DEBUG oslo_concurrency.lockutils [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "68791dff-12e0-499d-8835-1e9173af570f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.525492] env[61978]: DEBUG oslo_concurrency.lockutils [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "68791dff-12e0-499d-8835-1e9173af570f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.527847] env[61978]: INFO nova.compute.manager [None 
req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Terminating instance [ 1206.529983] env[61978]: DEBUG nova.compute.manager [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1206.530204] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1206.531040] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8987aa1-30eb-4287-87be-54f77e652547 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.538386] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1206.538623] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8bd5f774-48d6-45f0-bb9f-72e67326d581 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.545076] env[61978]: DEBUG oslo_vmware.api [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1206.545076] env[61978]: value = "task-1395681" [ 1206.545076] env[61978]: _type = "Task" [ 1206.545076] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.552751] env[61978]: DEBUG oslo_vmware.api [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395681, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.590665] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.425s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.593674] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.080s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.595285] env[61978]: INFO nova.compute.claims [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1206.643195] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bcabf189-aaf5-4417-8b1f-afbd79d0134e tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.268s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.647959] env[61978]: INFO nova.network.neutron [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Updating port bba3eeec-259f-4ea3-b0f6-e509a29d33f4 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1207.054869] env[61978]: DEBUG oslo_vmware.api [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395681, 'name': PowerOffVM_Task, 'duration_secs': 0.327701} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.055138] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1207.055328] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1207.055593] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-91631fdd-176b-4af9-94c4-2036a3852ce7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.114734] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1207.114982] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1207.115175] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Deleting the datastore file [datastore2] 68791dff-12e0-499d-8835-1e9173af570f {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1207.115444] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fdf5f396-7257-491e-9338-01ae4464fe8d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.121894] env[61978]: DEBUG oslo_vmware.api [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1207.121894] env[61978]: value = "task-1395683" [ 1207.121894] env[61978]: _type = "Task" [ 1207.121894] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.130088] env[61978]: DEBUG oslo_vmware.api [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395683, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.149460] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "d3c82821-0617-4de6-8109-813a67910ed1" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.149758] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "d3c82821-0617-4de6-8109-813a67910ed1" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1207.229009] env[61978]: DEBUG oslo_concurrency.lockutils [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "4d357d46-8bbb-4228-a5a6-2ce67fe037d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.229313] env[61978]: DEBUG oslo_concurrency.lockutils [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "4d357d46-8bbb-4228-a5a6-2ce67fe037d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1207.278352] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6310886e-7229-4c12-b921-79b73251d5b6 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.278669] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6310886e-7229-4c12-b921-79b73251d5b6 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1207.428258] env[61978]: DEBUG nova.virt.hardware [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1207.428523] env[61978]: DEBUG nova.virt.hardware [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1207.428722] env[61978]: DEBUG nova.virt.hardware [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1207.428925] env[61978]: DEBUG nova.virt.hardware [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1207.429095] env[61978]: DEBUG nova.virt.hardware [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1207.429255] env[61978]: DEBUG nova.virt.hardware [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1207.429475] env[61978]: DEBUG nova.virt.hardware [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1207.429639] env[61978]: DEBUG nova.virt.hardware [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1207.429813] env[61978]: DEBUG nova.virt.hardware [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1207.429984] env[61978]: DEBUG nova.virt.hardware [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1207.430178] env[61978]: DEBUG nova.virt.hardware 
[None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1207.431051] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fc4008e-5b2c-4a7d-9f25-c257378e058c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.439087] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2d37bc-8aac-48c2-9cb4-40f0de42bcd3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.452103] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:9c:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c97a7df3-acef-43d7-9e3b-2117f142a29d', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1207.459349] env[61978]: DEBUG oslo.service.loopingcall [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1207.459591] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1207.459803] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9cc8a7bd-1c80-46b2-8d70-9ab3025729b4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.477303] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1207.477303] env[61978]: value = "task-1395684" [ 1207.477303] env[61978]: _type = "Task" [ 1207.477303] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.486180] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395684, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.634019] env[61978]: DEBUG oslo_vmware.api [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395683, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12301} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.634368] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1207.634603] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1207.634851] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1207.635080] env[61978]: INFO nova.compute.manager [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1207.635342] env[61978]: DEBUG oslo.service.loopingcall [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1207.635550] env[61978]: DEBUG nova.compute.manager [-] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1207.635644] env[61978]: DEBUG nova.network.neutron [-] [instance: 68791dff-12e0-499d-8835-1e9173af570f] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1207.653341] env[61978]: DEBUG nova.compute.utils [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1207.731984] env[61978]: DEBUG nova.compute.manager [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1207.781201] env[61978]: INFO nova.compute.manager [None req-6310886e-7229-4c12-b921-79b73251d5b6 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Detaching volume 6f97516a-a581-42c8-8158-14d54c5b9874 [ 1207.815940] env[61978]: INFO nova.virt.block_device [None req-6310886e-7229-4c12-b921-79b73251d5b6 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Attempting to driver detach volume 6f97516a-a581-42c8-8158-14d54c5b9874 from mountpoint /dev/sdc [ 1207.816283] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6310886e-7229-4c12-b921-79b73251d5b6 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Volume detach. Driver type: vmdk {{(pid=61978) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1207.816708] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6310886e-7229-4c12-b921-79b73251d5b6 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295999', 'volume_id': '6f97516a-a581-42c8-8158-14d54c5b9874', 'name': 'volume-6f97516a-a581-42c8-8158-14d54c5b9874', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6', 'attached_at': '', 'detached_at': '', 'volume_id': '6f97516a-a581-42c8-8158-14d54c5b9874', 'serial': '6f97516a-a581-42c8-8158-14d54c5b9874'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1207.817681] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d08a92a-9b96-41ae-9582-caee832798f4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.849583] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eccd0b21-0dd5-45fb-b9bb-e6b18fde6e52 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.856365] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-351fa757-49c3-45e5-aa94-6f5d201b2e7d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.877763] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2877509e-469f-4805-b6b9-bbd695a75577 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.891970] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6310886e-7229-4c12-b921-79b73251d5b6 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] The volume has not been displaced from its original location: [datastore2] volume-6f97516a-a581-42c8-8158-14d54c5b9874/volume-6f97516a-a581-42c8-8158-14d54c5b9874.vmdk. No consolidation needed. 
{{(pid=61978) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1207.897104] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6310886e-7229-4c12-b921-79b73251d5b6 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Reconfiguring VM instance instance-0000004b to detach disk 2002 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1207.900916] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3129475-302d-4a03-a189-d4676aec07f9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.913798] env[61978]: DEBUG nova.network.neutron [-] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.920326] env[61978]: DEBUG oslo_vmware.api [None req-6310886e-7229-4c12-b921-79b73251d5b6 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1207.920326] env[61978]: value = "task-1395685" [ 1207.920326] env[61978]: _type = "Task" [ 1207.920326] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.925306] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187c363b-4d41-42e0-b8bf-6f65908c2422 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.934707] env[61978]: DEBUG oslo_vmware.api [None req-6310886e-7229-4c12-b921-79b73251d5b6 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395685, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.937420] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac8db09-17f0-41ca-b75a-8f443484ecbd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.968026] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3211ed0d-8abd-419e-bb82-2cf812e9d1be {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.975220] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-303cd660-228c-4f12-b89f-7df4815e0b6b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.991496] env[61978]: DEBUG nova.compute.provider_tree [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1207.996113] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395684, 'name': CreateVM_Task, 'duration_secs': 0.379589} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.996760] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1207.997550] env[61978]: DEBUG oslo_concurrency.lockutils [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1207.997663] env[61978]: DEBUG oslo_concurrency.lockutils [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.998034] env[61978]: DEBUG oslo_concurrency.lockutils [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1207.998331] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba6db9b1-c887-4913-a67a-39a5ae061991 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.002893] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1208.002893] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b35ecf-fc5b-39c5-4cad-ee8110904e0d" [ 1208.002893] env[61978]: _type = "Task" [ 1208.002893] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.010868] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b35ecf-fc5b-39c5-4cad-ee8110904e0d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.156915] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "d3c82821-0617-4de6-8109-813a67910ed1" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1208.182639] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "refresh_cache-17c56c1c-9992-4559-ad23-c68909ae6792" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1208.182831] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquired lock "refresh_cache-17c56c1c-9992-4559-ad23-c68909ae6792" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.183024] env[61978]: DEBUG nova.network.neutron [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1208.253143] env[61978]: DEBUG oslo_concurrency.lockutils [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1208.416759] env[61978]: INFO nova.compute.manager [-] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Took 0.78 seconds to deallocate network for instance. [ 1208.430052] env[61978]: DEBUG oslo_vmware.api [None req-6310886e-7229-4c12-b921-79b73251d5b6 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395685, 'name': ReconfigVM_Task, 'duration_secs': 0.220105} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.430337] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6310886e-7229-4c12-b921-79b73251d5b6 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Reconfigured VM instance instance-0000004b to detach disk 2002 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1208.435214] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a30fb769-bf93-4429-a571-6d884d5cbc66 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.452354] env[61978]: DEBUG oslo_vmware.api [None req-6310886e-7229-4c12-b921-79b73251d5b6 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1208.452354] env[61978]: value = "task-1395686" [ 1208.452354] env[61978]: _type = "Task" [ 1208.452354] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.463145] env[61978]: DEBUG oslo_vmware.api [None req-6310886e-7229-4c12-b921-79b73251d5b6 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395686, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.498476] env[61978]: DEBUG nova.scheduler.client.report [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1208.512578] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b35ecf-fc5b-39c5-4cad-ee8110904e0d, 'name': SearchDatastore_Task, 'duration_secs': 0.015704} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.512882] env[61978]: DEBUG oslo_concurrency.lockutils [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1208.513125] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1208.513398] env[61978]: DEBUG oslo_concurrency.lockutils [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1208.513555] env[61978]: DEBUG oslo_concurrency.lockutils [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.513743] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1208.514010] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e89e23a-150f-4c76-a979-3675a2c732e9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.522089] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1208.522289] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1208.522987] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ca13eb0-5f97-42a3-a499-6b2065688683 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.528961] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1208.528961] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]520bc2ce-3908-dc65-02a8-cf7151a11f12" [ 1208.528961] env[61978]: _type = "Task" [ 1208.528961] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.536867] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520bc2ce-3908-dc65-02a8-cf7151a11f12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.926621] env[61978]: DEBUG oslo_concurrency.lockutils [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1208.927944] env[61978]: DEBUG nova.network.neutron [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Updating instance_info_cache with network_info: [{"id": "bba3eeec-259f-4ea3-b0f6-e509a29d33f4", "address": "fa:16:3e:1e:db:c1", "network": {"id": "aa63ec3f-fde3-49f2-ab12-71ae85601428", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-445619089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "198eab494c0a4e0eb83bae5732df9c78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "650f7968-4522-4ba5-8304-1b9949951ed7", "external-id": "nsx-vlan-transportzone-568", "segmentation_id": 568, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbba3eeec-25", "ovs_interfaceid": "bba3eeec-259f-4ea3-b0f6-e509a29d33f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1208.962478] env[61978]: DEBUG oslo_vmware.api [None req-6310886e-7229-4c12-b921-79b73251d5b6 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395686, 'name': 
ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.003239] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.410s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1209.003925] env[61978]: DEBUG nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1209.007038] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.460s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1209.008242] env[61978]: INFO nova.compute.claims [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1209.039478] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520bc2ce-3908-dc65-02a8-cf7151a11f12, 'name': SearchDatastore_Task, 'duration_secs': 0.015176} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.041054] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d8cc154-d659-4055-a0f5-501ff7efc254 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.047493] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1209.047493] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52853711-1cb7-b487-27cc-e76c6d68ea8d" [ 1209.047493] env[61978]: _type = "Task" [ 1209.047493] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.057016] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52853711-1cb7-b487-27cc-e76c6d68ea8d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.212120] env[61978]: DEBUG nova.compute.manager [req-050ccaae-0c80-49ec-922c-537c54dbfc81 req-c7769714-d11a-4e0c-96c7-16afbeb06912 service nova] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Received event network-vif-deleted-73fe675b-ef9e-44db-a9d2-13d68f04aacb {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1209.225827] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "d3c82821-0617-4de6-8109-813a67910ed1" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1209.226081] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "d3c82821-0617-4de6-8109-813a67910ed1" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1209.226372] env[61978]: INFO nova.compute.manager [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Attaching volume fe3a2276-228e-421f-80d6-1ae89c15e505 to /dev/sdb [ 1209.259801] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc1ac8e-f348-4ef6-a9c2-563b5653bb62 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.266944] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7fe61d1-3ba1-4542-80e3-7d934b689172 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.271126] env[61978]: DEBUG nova.compute.manager [req-2f37ba6f-7473-4316-803d-da06edbeff6f req-e20d280a-3960-4c48-8926-0c8fb3c20559 service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Received event network-vif-plugged-bba3eeec-259f-4ea3-b0f6-e509a29d33f4 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1209.271345] env[61978]: DEBUG oslo_concurrency.lockutils [req-2f37ba6f-7473-4316-803d-da06edbeff6f req-e20d280a-3960-4c48-8926-0c8fb3c20559 service nova] Acquiring lock "17c56c1c-9992-4559-ad23-c68909ae6792-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1209.271564] env[61978]: DEBUG oslo_concurrency.lockutils [req-2f37ba6f-7473-4316-803d-da06edbeff6f req-e20d280a-3960-4c48-8926-0c8fb3c20559 service nova] Lock "17c56c1c-9992-4559-ad23-c68909ae6792-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1209.271737] env[61978]: DEBUG oslo_concurrency.lockutils [req-2f37ba6f-7473-4316-803d-da06edbeff6f req-e20d280a-3960-4c48-8926-0c8fb3c20559 service nova] Lock 
"17c56c1c-9992-4559-ad23-c68909ae6792-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1209.271908] env[61978]: DEBUG nova.compute.manager [req-2f37ba6f-7473-4316-803d-da06edbeff6f req-e20d280a-3960-4c48-8926-0c8fb3c20559 service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] No waiting events found dispatching network-vif-plugged-bba3eeec-259f-4ea3-b0f6-e509a29d33f4 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1209.272094] env[61978]: WARNING nova.compute.manager [req-2f37ba6f-7473-4316-803d-da06edbeff6f req-e20d280a-3960-4c48-8926-0c8fb3c20559 service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Received unexpected event network-vif-plugged-bba3eeec-259f-4ea3-b0f6-e509a29d33f4 for instance with vm_state shelved_offloaded and task_state spawning. [ 1209.283246] env[61978]: DEBUG nova.virt.block_device [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updating existing volume attachment record: ed81ba82-41f6-47e9-8716-61274d7527fe {{(pid=61978) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1209.431053] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Releasing lock "refresh_cache-17c56c1c-9992-4559-ad23-c68909ae6792" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1209.459416] env[61978]: DEBUG nova.virt.hardware [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='5b1f714cb520780a1ab527a994a2163f',container_format='bare',created_at=2024-11-04T15:08:56Z,direct_url=,disk_format='vmdk',id=443a8916-4f98-4cb9-9e27-49dd792e901d,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-679885889-shelved',owner='198eab494c0a4e0eb83bae5732df9c78',properties=ImageMetaProps,protected=,size=31661568,status='active',tags=,updated_at=2024-11-04T15:09:12Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1209.459588] env[61978]: DEBUG nova.virt.hardware [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1209.459757] env[61978]: DEBUG nova.virt.hardware [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1209.459946] env[61978]: DEBUG nova.virt.hardware [None 
req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1209.460116] env[61978]: DEBUG nova.virt.hardware [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1209.460274] env[61978]: DEBUG nova.virt.hardware [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1209.460502] env[61978]: DEBUG nova.virt.hardware [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1209.460670] env[61978]: DEBUG nova.virt.hardware [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1209.460846] env[61978]: DEBUG nova.virt.hardware [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1209.461023] env[61978]: DEBUG nova.virt.hardware [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1209.461210] env[61978]: DEBUG nova.virt.hardware [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1209.461951] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa34d0b-2a9c-4417-b677-db97cf85a1e6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.468233] env[61978]: DEBUG oslo_vmware.api [None req-6310886e-7229-4c12-b921-79b73251d5b6 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395686, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.472842] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b46f1c-0305-4208-94c8-effd61105ed2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.487217] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:db:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '650f7968-4522-4ba5-8304-1b9949951ed7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bba3eeec-259f-4ea3-b0f6-e509a29d33f4', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1209.494448] env[61978]: DEBUG oslo.service.loopingcall [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1209.494718] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1209.494941] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5f416ec6-688e-455e-9a04-a3cf8b585ee2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.512026] env[61978]: DEBUG nova.compute.utils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1209.516101] env[61978]: DEBUG nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1209.516276] env[61978]: DEBUG nova.network.neutron [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1209.518244] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1209.518244] env[61978]: value = "task-1395688" [ 1209.518244] env[61978]: _type = "Task" [ 1209.518244] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.526527] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395688, 'name': CreateVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.556875] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52853711-1cb7-b487-27cc-e76c6d68ea8d, 'name': SearchDatastore_Task, 'duration_secs': 0.009116} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.557243] env[61978]: DEBUG oslo_concurrency.lockutils [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1209.557444] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 97e128f9-7135-46b0-b22a-ee5449ba48b6/97e128f9-7135-46b0-b22a-ee5449ba48b6.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1209.557713] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c579ea3c-487e-4355-9464-bc9f0e8180b5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.564455] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1209.564455] env[61978]: value = "task-1395689" [ 1209.564455] env[61978]: _type = "Task" [ 1209.564455] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.565609] env[61978]: DEBUG nova.policy [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a1deeccfbba41a8adf1dc84c58eda1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b43df2aa7c044e0fad5c8f01741dacde', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1209.574895] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395689, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.842178] env[61978]: DEBUG nova.network.neutron [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Successfully created port: 2759dd21-49e4-41f6-a462-e567457ab39c {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1209.963535] env[61978]: DEBUG oslo_vmware.api [None req-6310886e-7229-4c12-b921-79b73251d5b6 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395686, 'name': ReconfigVM_Task, 'duration_secs': 1.144078} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.963931] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6310886e-7229-4c12-b921-79b73251d5b6 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-295999', 'volume_id': '6f97516a-a581-42c8-8158-14d54c5b9874', 'name': 'volume-6f97516a-a581-42c8-8158-14d54c5b9874', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6', 'attached_at': '', 'detached_at': '', 'volume_id': '6f97516a-a581-42c8-8158-14d54c5b9874', 'serial': '6f97516a-a581-42c8-8158-14d54c5b9874'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1210.020469] env[61978]: DEBUG nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1210.040572] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395688, 'name': CreateVM_Task, 'duration_secs': 0.365259} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.040759] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1210.041611] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/443a8916-4f98-4cb9-9e27-49dd792e901d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1210.041795] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquired lock "[datastore2] devstack-image-cache_base/443a8916-4f98-4cb9-9e27-49dd792e901d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.042273] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/443a8916-4f98-4cb9-9e27-49dd792e901d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1210.042503] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87b47aa7-f548-49aa-aad8-e71b602d29f6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.048206] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1210.048206] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]527d36df-7d4c-2e5f-d369-5a8e7d250c67" [ 1210.048206] env[61978]: _type = "Task" [ 1210.048206] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.058673] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]527d36df-7d4c-2e5f-d369-5a8e7d250c67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.075646] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395689, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480895} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.078193] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 97e128f9-7135-46b0-b22a-ee5449ba48b6/97e128f9-7135-46b0-b22a-ee5449ba48b6.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1210.078441] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1210.079461] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f291c36c-49d6-4b6c-8dfd-03a43b8c6a3c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.085445] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1210.085445] env[61978]: value = "task-1395692" [ 1210.085445] env[61978]: _type = "Task" [ 1210.085445] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.097993] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395692, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.297714] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec4e8e3-e905-45bb-b5cc-62310990dcd5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.305741] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86805d49-179b-4641-8b30-4eb5f5a46c6d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.335033] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c766c1fc-dbe2-4186-b579-60c889167286 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.341844] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52150a16-054a-411e-ab19-904af64ef6cb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.354653] env[61978]: DEBUG nova.compute.provider_tree [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1210.521788] env[61978]: DEBUG nova.objects.instance [None req-6310886e-7229-4c12-b921-79b73251d5b6 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lazy-loading 'flavor' on Instance uuid a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1210.558966] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Releasing lock "[datastore2] devstack-image-cache_base/443a8916-4f98-4cb9-9e27-49dd792e901d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1210.559233] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Processing image 443a8916-4f98-4cb9-9e27-49dd792e901d {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1210.559511] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/443a8916-4f98-4cb9-9e27-49dd792e901d/443a8916-4f98-4cb9-9e27-49dd792e901d.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1210.559824] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquired lock "[datastore2] devstack-image-cache_base/443a8916-4f98-4cb9-9e27-49dd792e901d/443a8916-4f98-4cb9-9e27-49dd792e901d.vmdk" {{(pid=61978) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.560097] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1210.560860] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd368bb4-be02-4f2f-9fcb-52f7996fe9de {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.569261] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1210.569528] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1210.570420] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79c47ec0-f93a-4639-8a93-e4ad3e37cbf8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.575403] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1210.575403] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f01f0c-2d19-6057-a6b9-fa89c2c22567" [ 1210.575403] env[61978]: _type = "Task" [ 1210.575403] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.582908] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f01f0c-2d19-6057-a6b9-fa89c2c22567, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.594510] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395692, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074879} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.594755] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1210.595665] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78dc36b9-8bdd-4e20-974f-6516a0ea8eec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.616293] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] 97e128f9-7135-46b0-b22a-ee5449ba48b6/97e128f9-7135-46b0-b22a-ee5449ba48b6.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1210.616545] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52ffa70a-e175-418c-b6ed-548ea734bf5a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.634818] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1210.634818] env[61978]: value = "task-1395693" [ 1210.634818] env[61978]: _type = "Task" [ 1210.634818] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.643612] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395693, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.858013] env[61978]: DEBUG nova.scheduler.client.report [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1211.034841] env[61978]: DEBUG nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1211.059718] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1211.060021] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1211.060203] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1211.060398] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1211.060557] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1211.060712] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1211.060925] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1211.061112] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1211.061336] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1211.061516] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1211.061696] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1211.062558] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aba8108-658b-49eb-b09e-1b18ce3d7c33 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.070722] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32326f71-fefa-4766-b79c-36eb42608f1e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.090966] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Preparing fetch location {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1211.091244] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Fetch image to [datastore2] OSTACK_IMG_484023df-8e16-41f0-9afc-2a7fed2ca270/OSTACK_IMG_484023df-8e16-41f0-9afc-2a7fed2ca270.vmdk {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1211.091435] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Downloading stream optimized image 443a8916-4f98-4cb9-9e27-49dd792e901d to [datastore2] OSTACK_IMG_484023df-8e16-41f0-9afc-2a7fed2ca270/OSTACK_IMG_484023df-8e16-41f0-9afc-2a7fed2ca270.vmdk on the data store datastore2 as vApp {{(pid=61978) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1211.091614] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Downloading image file data 443a8916-4f98-4cb9-9e27-49dd792e901d to the ESX as VM named 'OSTACK_IMG_484023df-8e16-41f0-9afc-2a7fed2ca270' {{(pid=61978) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 
1211.144900] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395693, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.160230] env[61978]: DEBUG oslo_vmware.rw_handles [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1211.160230] env[61978]: value = "resgroup-9" [ 1211.160230] env[61978]: _type = "ResourcePool" [ 1211.160230] env[61978]: }. {{(pid=61978) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1211.160230] env[61978]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-e3e8c482-1158-4383-8566-477028a590f5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.180323] env[61978]: DEBUG oslo_vmware.rw_handles [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lease: (returnval){ [ 1211.180323] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]521102ef-041b-702b-5763-82d433767353" [ 1211.180323] env[61978]: _type = "HttpNfcLease" [ 1211.180323] env[61978]: } obtained for vApp import into resource pool (val){ [ 1211.180323] env[61978]: value = "resgroup-9" [ 1211.180323] env[61978]: _type = "ResourcePool" [ 1211.180323] env[61978]: }. {{(pid=61978) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1211.180646] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the lease: (returnval){ [ 1211.180646] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]521102ef-041b-702b-5763-82d433767353" [ 1211.180646] env[61978]: _type = "HttpNfcLease" [ 1211.180646] env[61978]: } to be ready. {{(pid=61978) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1211.186821] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1211.186821] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]521102ef-041b-702b-5763-82d433767353" [ 1211.186821] env[61978]: _type = "HttpNfcLease" [ 1211.186821] env[61978]: } is initializing. 
{{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1211.366077] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.357s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1211.366077] env[61978]: DEBUG nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1211.367070] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.778s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1211.369771] env[61978]: INFO nova.compute.claims [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1211.379361] env[61978]: DEBUG nova.compute.manager [req-c80f1635-aef6-4479-8c37-f4b868b69532 req-0578db08-315e-410b-8990-96983a707016 service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Received event network-changed-bba3eeec-259f-4ea3-b0f6-e509a29d33f4 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1211.379361] env[61978]: DEBUG nova.compute.manager [req-c80f1635-aef6-4479-8c37-f4b868b69532 req-0578db08-315e-410b-8990-96983a707016 service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Refreshing instance network info cache due to event network-changed-bba3eeec-259f-4ea3-b0f6-e509a29d33f4. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1211.379526] env[61978]: DEBUG oslo_concurrency.lockutils [req-c80f1635-aef6-4479-8c37-f4b868b69532 req-0578db08-315e-410b-8990-96983a707016 service nova] Acquiring lock "refresh_cache-17c56c1c-9992-4559-ad23-c68909ae6792" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1211.379568] env[61978]: DEBUG oslo_concurrency.lockutils [req-c80f1635-aef6-4479-8c37-f4b868b69532 req-0578db08-315e-410b-8990-96983a707016 service nova] Acquired lock "refresh_cache-17c56c1c-9992-4559-ad23-c68909ae6792" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.379713] env[61978]: DEBUG nova.network.neutron [req-c80f1635-aef6-4479-8c37-f4b868b69532 req-0578db08-315e-410b-8990-96983a707016 service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Refreshing network info cache for port bba3eeec-259f-4ea3-b0f6-e509a29d33f4 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1211.515443] env[61978]: DEBUG nova.network.neutron [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Successfully updated port: 2759dd21-49e4-41f6-a462-e567457ab39c {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1211.530402] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6310886e-7229-4c12-b921-79b73251d5b6 tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.251s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1211.646179] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395693, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.689541] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1211.689541] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]521102ef-041b-702b-5763-82d433767353" [ 1211.689541] env[61978]: _type = "HttpNfcLease" [ 1211.689541] env[61978]: } is initializing. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1211.873464] env[61978]: DEBUG nova.compute.utils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1211.876762] env[61978]: DEBUG nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1211.876932] env[61978]: DEBUG nova.network.neutron [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1211.916237] env[61978]: DEBUG nova.policy [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a1deeccfbba41a8adf1dc84c58eda1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b43df2aa7c044e0fad5c8f01741dacde', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1212.015133] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "refresh_cache-90a38dba-0dae-455a-8d02-44c2bb098fb5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1212.015133] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquired lock "refresh_cache-90a38dba-0dae-455a-8d02-44c2bb098fb5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.015133] env[61978]: DEBUG nova.network.neutron [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1212.093589] env[61978]: DEBUG nova.network.neutron [req-c80f1635-aef6-4479-8c37-f4b868b69532 req-0578db08-315e-410b-8990-96983a707016 service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Updated VIF entry in instance network info cache for port bba3eeec-259f-4ea3-b0f6-e509a29d33f4. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1212.093589] env[61978]: DEBUG nova.network.neutron [req-c80f1635-aef6-4479-8c37-f4b868b69532 req-0578db08-315e-410b-8990-96983a707016 service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Updating instance_info_cache with network_info: [{"id": "bba3eeec-259f-4ea3-b0f6-e509a29d33f4", "address": "fa:16:3e:1e:db:c1", "network": {"id": "aa63ec3f-fde3-49f2-ab12-71ae85601428", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-445619089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "198eab494c0a4e0eb83bae5732df9c78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "650f7968-4522-4ba5-8304-1b9949951ed7", "external-id": "nsx-vlan-transportzone-568", "segmentation_id": 568, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbba3eeec-25", "ovs_interfaceid": "bba3eeec-259f-4ea3-b0f6-e509a29d33f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.147653] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395693, 'name': ReconfigVM_Task, 'duration_secs': 1.043811} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.147991] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Reconfigured VM instance instance-00000057 to attach disk [datastore2] 97e128f9-7135-46b0-b22a-ee5449ba48b6/97e128f9-7135-46b0-b22a-ee5449ba48b6.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1212.149372] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2a3b117a-396d-4663-af06-583ee7ed4888 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.157709] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1212.157709] env[61978]: value = "task-1395696" [ 1212.157709] env[61978]: _type = "Task" [ 1212.157709] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.173536] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395696, 'name': Rename_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.194043] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1212.194043] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]521102ef-041b-702b-5763-82d433767353" [ 1212.194043] env[61978]: _type = "HttpNfcLease" [ 1212.194043] env[61978]: } is ready. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1212.194043] env[61978]: DEBUG oslo_vmware.rw_handles [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1212.194043] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]521102ef-041b-702b-5763-82d433767353" [ 1212.194043] env[61978]: _type = "HttpNfcLease" [ 1212.194043] env[61978]: }. {{(pid=61978) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1212.194043] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7bc7a34-6874-474e-8b48-cdff53029d32 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.202864] env[61978]: DEBUG oslo_vmware.rw_handles [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b1a813-d99e-c37a-85a5-a82e5ed9665c/disk-0.vmdk from lease info. {{(pid=61978) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1212.203151] env[61978]: DEBUG oslo_vmware.rw_handles [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Creating HTTP connection to write to file with size = 31661568 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b1a813-d99e-c37a-85a5-a82e5ed9665c/disk-0.vmdk. {{(pid=61978) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1212.204963] env[61978]: DEBUG nova.network.neutron [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Successfully created port: 58e26b61-334b-4383-b787-c9cb140c549e {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1212.279378] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e118451a-d916-45e7-a1b2-46f660caaf38 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.377149] env[61978]: DEBUG nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1212.551078] env[61978]: DEBUG nova.network.neutron [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1212.593776] env[61978]: DEBUG oslo_concurrency.lockutils [req-c80f1635-aef6-4479-8c37-f4b868b69532 req-0578db08-315e-410b-8990-96983a707016 service nova] Releasing lock "refresh_cache-17c56c1c-9992-4559-ad23-c68909ae6792" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1212.671571] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395696, 'name': Rename_Task, 'duration_secs': 0.127306} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.676438] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1212.677832] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c557b7b-86ff-404c-8b0b-a8884b7d46ef {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.683564] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1212.683564] env[61978]: value = "task-1395697" [ 1212.683564] env[61978]: _type = "Task" [ 1212.683564] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.689735] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de35f880-8c2a-4ffc-a3ce-892d85bdf1c4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.695353] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395697, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.701786] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc7f0278-c0cc-4e6f-857a-a311a9b05e24 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.737502] env[61978]: DEBUG nova.network.neutron [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Updating instance_info_cache with network_info: [{"id": "2759dd21-49e4-41f6-a462-e567457ab39c", "address": "fa:16:3e:68:f2:02", "network": {"id": "5132b4be-ba71-4558-b493-ca23c53670af", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-2050667042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b43df2aa7c044e0fad5c8f01741dacde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2759dd21-49", "ovs_interfaceid": "2759dd21-49e4-41f6-a462-e567457ab39c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.741793] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d151ae27-7d58-4553-b118-877c568fa3d3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.750298] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b3533c-438c-4a7d-b093-7e8440f01070 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.768454] env[61978]: DEBUG nova.compute.provider_tree [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1212.876652] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.876652] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock 
"a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.876652] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.876775] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.876924] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1212.879075] env[61978]: INFO nova.compute.manager [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Terminating instance [ 1212.880865] env[61978]: DEBUG nova.compute.manager [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1212.881099] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1212.881944] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d902ea4a-3fb6-4565-9423-71b482e370ea {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.893190] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1212.893508] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-358bbf11-9f89-4a64-823b-ac5aede64aaa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.899896] env[61978]: DEBUG oslo_vmware.api [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1212.899896] env[61978]: value = "task-1395698" [ 1212.899896] env[61978]: _type = "Task" [ 1212.899896] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.907843] env[61978]: DEBUG oslo_vmware.api [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395698, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.194163] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395697, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.245271] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Releasing lock "refresh_cache-90a38dba-0dae-455a-8d02-44c2bb098fb5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1213.245617] env[61978]: DEBUG nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Instance network_info: |[{"id": "2759dd21-49e4-41f6-a462-e567457ab39c", "address": "fa:16:3e:68:f2:02", "network": {"id": "5132b4be-ba71-4558-b493-ca23c53670af", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-2050667042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b43df2aa7c044e0fad5c8f01741dacde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2759dd21-49", "ovs_interfaceid": "2759dd21-49e4-41f6-a462-e567457ab39c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1213.246083] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:f2:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2759dd21-49e4-41f6-a462-e567457ab39c', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1213.253642] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Creating folder: Project (b43df2aa7c044e0fad5c8f01741dacde). Parent ref: group-v295764. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1213.253985] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3190205e-84e3-4654-b95f-260399769234 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.264298] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Created folder: Project (b43df2aa7c044e0fad5c8f01741dacde) in parent group-v295764. [ 1213.264501] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Creating folder: Instances. Parent ref: group-v296006. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1213.264755] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5607344-94b3-4097-9ac0-458d9038df76 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.273552] env[61978]: DEBUG nova.scheduler.client.report [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1213.279993] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Created folder: Instances in parent group-v296006. [ 1213.280264] env[61978]: DEBUG oslo.service.loopingcall [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1213.280699] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1213.280905] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7fdc33f0-395b-4aa0-ade0-70ab35edbd88 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.304537] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1213.304537] env[61978]: value = "task-1395701" [ 1213.304537] env[61978]: _type = "Task" [ 1213.304537] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.315832] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395701, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.391492] env[61978]: DEBUG nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1213.412351] env[61978]: DEBUG oslo_vmware.api [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395698, 'name': PowerOffVM_Task, 'duration_secs': 0.201813} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.414660] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1213.414873] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1213.415927] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d53fd185-c515-4288-ac29-2271cdcc75bf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.423127] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1213.423448] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1213.423688] env[61978]: DEBUG nova.virt.hardware [None 
req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1213.423915] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1213.424133] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1213.424372] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1213.424718] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1213.424851] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1213.425083] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1213.425331] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1213.425561] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1213.426693] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a99ebd7-5f0b-45cf-8972-df40be3340a5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.438543] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0c4093b-dd27-4de1-946a-8a332630c3f5 {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.449353] env[61978]: DEBUG nova.compute.manager [req-fc9e5328-4158-42dd-a47f-b914310b6545 req-58c2836b-34ca-47d0-b75d-dfd88b4dfa0a service nova] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Received event network-vif-plugged-2759dd21-49e4-41f6-a462-e567457ab39c {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1213.449353] env[61978]: DEBUG oslo_concurrency.lockutils [req-fc9e5328-4158-42dd-a47f-b914310b6545 req-58c2836b-34ca-47d0-b75d-dfd88b4dfa0a service nova] Acquiring lock "90a38dba-0dae-455a-8d02-44c2bb098fb5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1213.449353] env[61978]: DEBUG oslo_concurrency.lockutils [req-fc9e5328-4158-42dd-a47f-b914310b6545 req-58c2836b-34ca-47d0-b75d-dfd88b4dfa0a service nova] Lock "90a38dba-0dae-455a-8d02-44c2bb098fb5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1213.449353] env[61978]: DEBUG oslo_concurrency.lockutils [req-fc9e5328-4158-42dd-a47f-b914310b6545 req-58c2836b-34ca-47d0-b75d-dfd88b4dfa0a service nova] Lock "90a38dba-0dae-455a-8d02-44c2bb098fb5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1213.449353] env[61978]: DEBUG nova.compute.manager [req-fc9e5328-4158-42dd-a47f-b914310b6545 req-58c2836b-34ca-47d0-b75d-dfd88b4dfa0a service nova] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] No waiting events found dispatching network-vif-plugged-2759dd21-49e4-41f6-a462-e567457ab39c {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1213.449954] env[61978]: WARNING nova.compute.manager [req-fc9e5328-4158-42dd-a47f-b914310b6545 req-58c2836b-34ca-47d0-b75d-dfd88b4dfa0a service nova] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Received unexpected event network-vif-plugged-2759dd21-49e4-41f6-a462-e567457ab39c for instance with vm_state building and task_state spawning. [ 1213.449954] env[61978]: DEBUG nova.compute.manager [req-fc9e5328-4158-42dd-a47f-b914310b6545 req-58c2836b-34ca-47d0-b75d-dfd88b4dfa0a service nova] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Received event network-changed-2759dd21-49e4-41f6-a462-e567457ab39c {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1213.449954] env[61978]: DEBUG nova.compute.manager [req-fc9e5328-4158-42dd-a47f-b914310b6545 req-58c2836b-34ca-47d0-b75d-dfd88b4dfa0a service nova] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Refreshing instance network info cache due to event network-changed-2759dd21-49e4-41f6-a462-e567457ab39c. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1213.450106] env[61978]: DEBUG oslo_concurrency.lockutils [req-fc9e5328-4158-42dd-a47f-b914310b6545 req-58c2836b-34ca-47d0-b75d-dfd88b4dfa0a service nova] Acquiring lock "refresh_cache-90a38dba-0dae-455a-8d02-44c2bb098fb5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1213.450215] env[61978]: DEBUG oslo_concurrency.lockutils [req-fc9e5328-4158-42dd-a47f-b914310b6545 req-58c2836b-34ca-47d0-b75d-dfd88b4dfa0a service nova] Acquired lock "refresh_cache-90a38dba-0dae-455a-8d02-44c2bb098fb5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.450384] env[61978]: DEBUG nova.network.neutron [req-fc9e5328-4158-42dd-a47f-b914310b6545 req-58c2836b-34ca-47d0-b75d-dfd88b4dfa0a service nova] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Refreshing network info cache for port 2759dd21-49e4-41f6-a462-e567457ab39c {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1213.487668] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1213.488028] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1213.488221] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Deleting the datastore file [datastore2] a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1213.488552] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e944cfb5-6688-4efd-9e48-300ba83a5523 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.496189] env[61978]: DEBUG oslo_vmware.api [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for the task: (returnval){ [ 1213.496189] env[61978]: value = "task-1395703" [ 1213.496189] env[61978]: _type = "Task" [ 1213.496189] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.505448] env[61978]: DEBUG oslo_vmware.api [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395703, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.599795] env[61978]: DEBUG oslo_vmware.rw_handles [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Completed reading data from the image iterator. {{(pid=61978) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1213.600179] env[61978]: DEBUG oslo_vmware.rw_handles [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b1a813-d99e-c37a-85a5-a82e5ed9665c/disk-0.vmdk. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1213.601207] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb9a0f9-ee8e-48b3-8a7f-1c857d918096 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.608182] env[61978]: DEBUG oslo_vmware.rw_handles [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b1a813-d99e-c37a-85a5-a82e5ed9665c/disk-0.vmdk is in state: ready. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1213.608429] env[61978]: DEBUG oslo_vmware.rw_handles [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b1a813-d99e-c37a-85a5-a82e5ed9665c/disk-0.vmdk. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1213.608713] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-3b4583f0-ecb1-4aa7-a699-a38ef85d1450 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.693997] env[61978]: DEBUG oslo_vmware.api [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395697, 'name': PowerOnVM_Task, 'duration_secs': 0.570741} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.694307] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1213.694528] env[61978]: DEBUG nova.compute.manager [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1213.695320] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe366c6-2b69-4cf4-9433-836820e30186 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.781726] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.415s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1213.782273] env[61978]: DEBUG nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1213.785065] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 10.659s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1213.817202] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395701, 'name': CreateVM_Task, 'duration_secs': 0.390676} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.817409] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1213.818137] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1213.819079] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.819079] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1213.819252] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aab622aa-7374-41dc-b0cc-a74a5fcb509a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.824557] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1213.824557] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a9dcbf-d2e0-673b-1678-b95e2d35537f" [ 1213.824557] env[61978]: _type = "Task" [ 1213.824557] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.832636] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a9dcbf-d2e0-673b-1678-b95e2d35537f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.910147] env[61978]: DEBUG nova.network.neutron [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Successfully updated port: 58e26b61-334b-4383-b787-c9cb140c549e {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1214.008496] env[61978]: DEBUG oslo_vmware.api [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Task: {'id': task-1395703, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173594} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.008911] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1214.009170] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1214.009365] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1214.009548] env[61978]: INFO nova.compute.manager [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1214.009789] env[61978]: DEBUG oslo.service.loopingcall [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1214.009984] env[61978]: DEBUG nova.compute.manager [-] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1214.010089] env[61978]: DEBUG nova.network.neutron [-] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1214.172356] env[61978]: DEBUG oslo_vmware.rw_handles [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b1a813-d99e-c37a-85a5-a82e5ed9665c/disk-0.vmdk. 
{{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1214.172596] env[61978]: INFO nova.virt.vmwareapi.images [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Downloaded image file data 443a8916-4f98-4cb9-9e27-49dd792e901d [ 1214.173551] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8b3240-958e-416a-9f72-c4a69aaff1a3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.191937] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2939a70a-bb79-45ef-a222-5a02d34228aa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.213903] env[61978]: DEBUG oslo_concurrency.lockutils [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1214.261385] env[61978]: DEBUG nova.network.neutron [req-fc9e5328-4158-42dd-a47f-b914310b6545 req-58c2836b-34ca-47d0-b75d-dfd88b4dfa0a service nova] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Updated VIF entry in instance network info cache for port 2759dd21-49e4-41f6-a462-e567457ab39c. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1214.262735] env[61978]: DEBUG nova.network.neutron [req-fc9e5328-4158-42dd-a47f-b914310b6545 req-58c2836b-34ca-47d0-b75d-dfd88b4dfa0a service nova] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Updating instance_info_cache with network_info: [{"id": "2759dd21-49e4-41f6-a462-e567457ab39c", "address": "fa:16:3e:68:f2:02", "network": {"id": "5132b4be-ba71-4558-b493-ca23c53670af", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-2050667042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b43df2aa7c044e0fad5c8f01741dacde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2759dd21-49", "ovs_interfaceid": "2759dd21-49e4-41f6-a462-e567457ab39c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1214.288852] env[61978]: DEBUG nova.compute.utils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Using /dev/sd instead of None {{(pid=61978) 
get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1214.292804] env[61978]: DEBUG nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1214.292995] env[61978]: DEBUG nova.network.neutron [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1214.334172] env[61978]: DEBUG nova.policy [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a1deeccfbba41a8adf1dc84c58eda1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b43df2aa7c044e0fad5c8f01741dacde', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1214.340468] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a9dcbf-d2e0-673b-1678-b95e2d35537f, 'name': SearchDatastore_Task, 'duration_secs': 0.019721} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.340660] env[61978]: INFO nova.virt.vmwareapi.images [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] The imported VM was unregistered [ 1214.343044] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Caching image {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1214.343273] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Creating directory with path [datastore2] devstack-image-cache_base/443a8916-4f98-4cb9-9e27-49dd792e901d {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1214.346378] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1214.346625] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1214.346860] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1214.347023] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1214.347204] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1214.347480] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b56c92b1-6d1b-4756-9e5d-c41fe09e4928 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.349690] env[61978]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-3dd59046-8fc8-4921-aca9-0815846f1908 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.362495] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1214.362699] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1214.363401] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b54d1301-7ce8-467e-bf4b-09283a3b1965 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.368702] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1214.368702] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52db3a79-2385-e575-f655-ab41959e6ebe" [ 1214.368702] env[61978]: _type = "Task" [ 1214.368702] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.369842] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Created directory with path [datastore2] devstack-image-cache_base/443a8916-4f98-4cb9-9e27-49dd792e901d {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1214.370073] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_484023df-8e16-41f0-9afc-2a7fed2ca270/OSTACK_IMG_484023df-8e16-41f0-9afc-2a7fed2ca270.vmdk to [datastore2] devstack-image-cache_base/443a8916-4f98-4cb9-9e27-49dd792e901d/443a8916-4f98-4cb9-9e27-49dd792e901d.vmdk. {{(pid=61978) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1214.372979] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-8a9c6c83-f6c8-4b58-bc9f-490c0309eaad {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.377915] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Volume attach. 
Driver type: vmdk {{(pid=61978) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1214.378149] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296004', 'volume_id': 'fe3a2276-228e-421f-80d6-1ae89c15e505', 'name': 'volume-fe3a2276-228e-421f-80d6-1ae89c15e505', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd3c82821-0617-4de6-8109-813a67910ed1', 'attached_at': '', 'detached_at': '', 'volume_id': 'fe3a2276-228e-421f-80d6-1ae89c15e505', 'serial': 'fe3a2276-228e-421f-80d6-1ae89c15e505'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1214.378907] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-207d473a-0622-4693-973d-9a89eb559df3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.385914] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52db3a79-2385-e575-f655-ab41959e6ebe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.397153] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1214.397153] env[61978]: value = "task-1395705" [ 1214.397153] env[61978]: _type = "Task" [ 1214.397153] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.399993] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75cbcac6-8506-41f1-8ca4-8e316e45e8d5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.422802] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "refresh_cache-b76dd94e-c14b-48d4-bb7f-020313412ca2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1214.422950] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquired lock "refresh_cache-b76dd94e-c14b-48d4-bb7f-020313412ca2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1214.423113] env[61978]: DEBUG nova.network.neutron [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1214.432853] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] volume-fe3a2276-228e-421f-80d6-1ae89c15e505/volume-fe3a2276-228e-421f-80d6-1ae89c15e505.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1214.440071] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94c952b6-b098-4699-a45a-c4a1f3701d1f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.457144] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395705, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.463698] env[61978]: DEBUG oslo_vmware.api [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1214.463698] env[61978]: value = "task-1395706" [ 1214.463698] env[61978]: _type = "Task" [ 1214.463698] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.475491] env[61978]: DEBUG oslo_vmware.api [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395706, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.505426] env[61978]: DEBUG nova.network.neutron [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1214.664126] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce86893-1461-474b-84ba-83b42ef6d6d5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.674602] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c7a6f0-3a8a-493c-8d7d-35246a89f1df {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.711346] env[61978]: DEBUG nova.network.neutron [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Successfully created port: 5db0518b-5552-40e5-80e3-e15e330660eb {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1214.714527] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e8db62-1dcb-4fd8-b408-3f4467c2e9e1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.718022] env[61978]: DEBUG nova.network.neutron [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Updating instance_info_cache with network_info: [{"id": "58e26b61-334b-4383-b787-c9cb140c549e", "address": "fa:16:3e:d5:e9:86", "network": {"id": "5132b4be-ba71-4558-b493-ca23c53670af", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-2050667042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b43df2aa7c044e0fad5c8f01741dacde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58e26b61-33", "ovs_interfaceid": "58e26b61-334b-4383-b787-c9cb140c549e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1214.728412] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ff99b6-4079-4558-9133-e06c5fc160b9 {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.742809] env[61978]: DEBUG nova.compute.provider_tree [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1214.766249] env[61978]: DEBUG oslo_concurrency.lockutils [req-fc9e5328-4158-42dd-a47f-b914310b6545 req-58c2836b-34ca-47d0-b75d-dfd88b4dfa0a service nova] Releasing lock "refresh_cache-90a38dba-0dae-455a-8d02-44c2bb098fb5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1214.794188] env[61978]: DEBUG nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1214.881828] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52db3a79-2385-e575-f655-ab41959e6ebe, 'name': SearchDatastore_Task, 'duration_secs': 0.024769} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.883071] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33e61194-cb9e-4b6e-a91d-a34eecd2a99f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.893479] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1214.893479] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b84795-f7ac-c040-553d-35e0c0c83eaa" [ 1214.893479] env[61978]: _type = "Task" [ 1214.893479] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.903917] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b84795-f7ac-c040-553d-35e0c0c83eaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.914705] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395705, 'name': MoveVirtualDisk_Task} progress is 15%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.978119] env[61978]: DEBUG oslo_vmware.api [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395706, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.130976] env[61978]: DEBUG nova.compute.manager [req-d8b97416-e48b-48ec-a3f3-0aec19ed2fcc req-f8b623e3-c689-48d6-aac1-f751da97cd8d service nova] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Received event network-vif-deleted-e0d58422-d319-4563-81b9-65c067c4b306 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1215.131056] env[61978]: INFO nova.compute.manager [req-d8b97416-e48b-48ec-a3f3-0aec19ed2fcc req-f8b623e3-c689-48d6-aac1-f751da97cd8d service nova] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Neutron deleted interface e0d58422-d319-4563-81b9-65c067c4b306; detaching it from the instance and deleting it from the info cache [ 1215.131241] env[61978]: DEBUG nova.network.neutron [req-d8b97416-e48b-48ec-a3f3-0aec19ed2fcc req-f8b623e3-c689-48d6-aac1-f751da97cd8d service nova] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1215.181474] env[61978]: DEBUG nova.network.neutron [-] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1215.220730] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Releasing lock "refresh_cache-b76dd94e-c14b-48d4-bb7f-020313412ca2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1215.221132] env[61978]: DEBUG nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Instance network_info: |[{"id": "58e26b61-334b-4383-b787-c9cb140c549e", "address": "fa:16:3e:d5:e9:86", "network": {"id": "5132b4be-ba71-4558-b493-ca23c53670af", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-2050667042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b43df2aa7c044e0fad5c8f01741dacde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58e26b61-33", "ovs_interfaceid": "58e26b61-334b-4383-b787-c9cb140c549e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1215.221595] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:e9:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '58e26b61-334b-4383-b787-c9cb140c549e', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1215.229558] env[61978]: DEBUG oslo.service.loopingcall [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1215.230157] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1215.230404] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e8967c92-1008-4bdb-a9e7-132ef8420f7d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.250981] env[61978]: DEBUG nova.scheduler.client.report [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1215.256039] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1215.256039] env[61978]: value = "task-1395707" [ 1215.256039] env[61978]: _type = "Task" [ 1215.256039] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.267553] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395707, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.407889] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b84795-f7ac-c040-553d-35e0c0c83eaa, 'name': SearchDatastore_Task, 'duration_secs': 0.014864} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.411538] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1215.412267] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 90a38dba-0dae-455a-8d02-44c2bb098fb5/90a38dba-0dae-455a-8d02-44c2bb098fb5.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1215.412572] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e62b768-d107-486f-b1bc-bec2687b1491 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.422694] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395705, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.424103] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1215.424103] env[61978]: value = "task-1395708" [ 1215.424103] env[61978]: _type = "Task" [ 1215.424103] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.435480] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395708, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.454573] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "97e128f9-7135-46b0-b22a-ee5449ba48b6" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.455086] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "97e128f9-7135-46b0-b22a-ee5449ba48b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.455423] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "97e128f9-7135-46b0-b22a-ee5449ba48b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.455777] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "97e128f9-7135-46b0-b22a-ee5449ba48b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.456213] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "97e128f9-7135-46b0-b22a-ee5449ba48b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1215.459769] env[61978]: INFO nova.compute.manager [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Terminating instance [ 1215.463104] env[61978]: DEBUG nova.compute.manager [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Start destroying the instance on the hypervisor.
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1215.463390] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1215.465053] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366dd3cb-b8a9-4c40-8962-9183cb5ee41c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.485242] env[61978]: DEBUG oslo_vmware.api [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395706, 'name': ReconfigVM_Task, 'duration_secs': 0.838908} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.488040] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Reconfigured VM instance instance-00000050 to attach disk [datastore2] volume-fe3a2276-228e-421f-80d6-1ae89c15e505/volume-fe3a2276-228e-421f-80d6-1ae89c15e505.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1215.493537] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1215.493881] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2d75bab-995b-4438-be4b-da59efe444e5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.504762] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-680a22f4-853a-4388-8fe8-5d3135f29fc5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.516194] env[61978]: DEBUG oslo_vmware.api [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1215.516194] env[61978]: value = "task-1395710" [ 1215.516194] env[61978]: _type = "Task" [ 1215.516194] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.516540] env[61978]: DEBUG oslo_vmware.api [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1215.516540] env[61978]: value = "task-1395709" [ 1215.516540] env[61978]: _type = "Task" [ 1215.516540] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.538016] env[61978]: DEBUG oslo_vmware.api [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395709, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.538468] env[61978]: DEBUG oslo_vmware.api [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395710, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.551704] env[61978]: DEBUG nova.compute.manager [req-c6cb420e-dfd8-4f5a-ab52-79be69290c2e req-3302cd73-7074-4fc6-a0b0-66bb0183ee5b service nova] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Received event network-vif-plugged-58e26b61-334b-4383-b787-c9cb140c549e {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1215.551704] env[61978]: DEBUG oslo_concurrency.lockutils [req-c6cb420e-dfd8-4f5a-ab52-79be69290c2e req-3302cd73-7074-4fc6-a0b0-66bb0183ee5b service nova] Acquiring lock "b76dd94e-c14b-48d4-bb7f-020313412ca2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.551946] env[61978]: DEBUG oslo_concurrency.lockutils [req-c6cb420e-dfd8-4f5a-ab52-79be69290c2e req-3302cd73-7074-4fc6-a0b0-66bb0183ee5b service nova] Lock "b76dd94e-c14b-48d4-bb7f-020313412ca2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.552179] env[61978]: DEBUG oslo_concurrency.lockutils [req-c6cb420e-dfd8-4f5a-ab52-79be69290c2e req-3302cd73-7074-4fc6-a0b0-66bb0183ee5b service nova] Lock "b76dd94e-c14b-48d4-bb7f-020313412ca2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1215.552394] env[61978]: DEBUG nova.compute.manager [req-c6cb420e-dfd8-4f5a-ab52-79be69290c2e req-3302cd73-7074-4fc6-a0b0-66bb0183ee5b service nova] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] No waiting events found dispatching network-vif-plugged-58e26b61-334b-4383-b787-c9cb140c549e {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1215.552652] env[61978]: WARNING nova.compute.manager [req-c6cb420e-dfd8-4f5a-ab52-79be69290c2e req-3302cd73-7074-4fc6-a0b0-66bb0183ee5b service nova] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Received unexpected event network-vif-plugged-58e26b61-334b-4383-b787-c9cb140c549e for instance with vm_state building and task_state spawning.
[ 1215.552770] env[61978]: DEBUG nova.compute.manager [req-c6cb420e-dfd8-4f5a-ab52-79be69290c2e req-3302cd73-7074-4fc6-a0b0-66bb0183ee5b service nova] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Received event network-changed-58e26b61-334b-4383-b787-c9cb140c549e {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1215.552937] env[61978]: DEBUG nova.compute.manager [req-c6cb420e-dfd8-4f5a-ab52-79be69290c2e req-3302cd73-7074-4fc6-a0b0-66bb0183ee5b service nova] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Refreshing instance network info cache due to event network-changed-58e26b61-334b-4383-b787-c9cb140c549e. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1215.553166] env[61978]: DEBUG oslo_concurrency.lockutils [req-c6cb420e-dfd8-4f5a-ab52-79be69290c2e req-3302cd73-7074-4fc6-a0b0-66bb0183ee5b service nova] Acquiring lock "refresh_cache-b76dd94e-c14b-48d4-bb7f-020313412ca2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1215.553357] env[61978]: DEBUG oslo_concurrency.lockutils [req-c6cb420e-dfd8-4f5a-ab52-79be69290c2e req-3302cd73-7074-4fc6-a0b0-66bb0183ee5b service nova] Acquired lock "refresh_cache-b76dd94e-c14b-48d4-bb7f-020313412ca2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1215.553548] env[61978]: DEBUG nova.network.neutron [req-c6cb420e-dfd8-4f5a-ab52-79be69290c2e req-3302cd73-7074-4fc6-a0b0-66bb0183ee5b service nova] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Refreshing network info cache for port 58e26b61-334b-4383-b787-c9cb140c549e {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1215.633785] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1cb4efc2-6f86-425e-b543-74bb47549a81 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.645916] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af5b8d8-028f-412f-b65c-31804d797156 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.682194] env[61978]: DEBUG nova.compute.manager [req-d8b97416-e48b-48ec-a3f3-0aec19ed2fcc req-f8b623e3-c689-48d6-aac1-f751da97cd8d service nova] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Detach interface failed, port_id=e0d58422-d319-4563-81b9-65c067c4b306, reason: Instance a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1215.684197] env[61978]: INFO nova.compute.manager [-] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Took 1.67 seconds to deallocate network for instance. [ 1215.773621] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395707, 'name': CreateVM_Task, 'duration_secs': 0.433383} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.774567] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1215.775355] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1215.775564] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1215.775941] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1215.776559] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce822106-30f9-4847-a42e-5ddb7f665714 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.784432] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1215.784432] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5287684c-a966-a9fe-cce2-ecf2e40520f7" [ 1215.784432] env[61978]: _type = "Task" [ 1215.784432] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.794583] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5287684c-a966-a9fe-cce2-ecf2e40520f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.804582] env[61978]: DEBUG nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1215.835802] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1215.836096] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1215.836311] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1215.836573] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1215.836756] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1215.836917] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1215.837159] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1215.837418] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1215.837566] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1215.837721] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1215.837903] env[61978]: DEBUG nova.virt.hardware [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1215.838921] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9ab368-b5f1-4e44-8771-f6ee61b54bfc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.850825] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0501d267-e778-4d9a-afb8-486585b4fbb6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.919423] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395705, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.938648] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395708, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.035687] env[61978]: DEBUG oslo_vmware.api [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395709, 'name': ReconfigVM_Task, 'duration_secs': 0.262045} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.036018] env[61978]: DEBUG oslo_vmware.api [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395710, 'name': PowerOffVM_Task, 'duration_secs': 0.44563} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.036433] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296004', 'volume_id': 'fe3a2276-228e-421f-80d6-1ae89c15e505', 'name': 'volume-fe3a2276-228e-421f-80d6-1ae89c15e505', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd3c82821-0617-4de6-8109-813a67910ed1', 'attached_at': '', 'detached_at': '', 'volume_id': 'fe3a2276-228e-421f-80d6-1ae89c15e505', 'serial': 'fe3a2276-228e-421f-80d6-1ae89c15e505'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1216.038140] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1216.038415] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1216.038940] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-491e10c1-e4cb-4ae3-88a2-589a2b1bd0d3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.133919] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1216.133919] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1216.133919] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Deleting the datastore file [datastore2] 97e128f9-7135-46b0-b22a-ee5449ba48b6 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1216.133919] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce12d0c5-c765-461f-8ec1-abdaf9885f11 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.142867] env[61978]: DEBUG oslo_vmware.api [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1216.142867] 
env[61978]: value = "task-1395712" [ 1216.142867] env[61978]: _type = "Task" [ 1216.142867] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.153671] env[61978]: DEBUG oslo_vmware.api [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395712, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.192213] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1216.267551] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.482s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1216.270534] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.528s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1216.272132] env[61978]: INFO nova.compute.claims [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1216.303174] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5287684c-a966-a9fe-cce2-ecf2e40520f7, 'name': SearchDatastore_Task, 'duration_secs': 0.065395} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.303648] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1216.303936] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1216.304191] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1216.304377] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1216.304581] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1216.304878] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b5a62a3d-c1a5-4161-a830-24cd0900bb2a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.318387] env[61978]: DEBUG nova.network.neutron [req-c6cb420e-dfd8-4f5a-ab52-79be69290c2e req-3302cd73-7074-4fc6-a0b0-66bb0183ee5b service nova] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Updated VIF entry in instance network info cache for port 58e26b61-334b-4383-b787-c9cb140c549e. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1216.318760] env[61978]: DEBUG nova.network.neutron [req-c6cb420e-dfd8-4f5a-ab52-79be69290c2e req-3302cd73-7074-4fc6-a0b0-66bb0183ee5b service nova] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Updating instance_info_cache with network_info: [{"id": "58e26b61-334b-4383-b787-c9cb140c549e", "address": "fa:16:3e:d5:e9:86", "network": {"id": "5132b4be-ba71-4558-b493-ca23c53670af", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-2050667042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b43df2aa7c044e0fad5c8f01741dacde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58e26b61-33", "ovs_interfaceid": "58e26b61-334b-4383-b787-c9cb140c549e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1216.327022] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1216.327261] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1216.328216] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6365a988-32c7-4b98-a789-1f47d8fcdeb2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.336733] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1216.336733] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52773246-7bed-4223-a9ed-eab90737c996" [ 1216.336733] env[61978]: _type = "Task" [ 1216.336733] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.342453] env[61978]: DEBUG nova.compute.manager [req-be15c63d-d12c-4c3a-93b3-cd10e96d6d68 req-2a46c756-9255-4ed4-b339-055546f3f110 service nova] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Received event network-vif-plugged-5db0518b-5552-40e5-80e3-e15e330660eb {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1216.342787] env[61978]: DEBUG oslo_concurrency.lockutils [req-be15c63d-d12c-4c3a-93b3-cd10e96d6d68 req-2a46c756-9255-4ed4-b339-055546f3f110 service nova] Acquiring lock "e9b70b36-d0d8-430e-a5e7-588d3c75d7ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1216.342899] env[61978]: DEBUG oslo_concurrency.lockutils [req-be15c63d-d12c-4c3a-93b3-cd10e96d6d68 req-2a46c756-9255-4ed4-b339-055546f3f110 service nova] Lock "e9b70b36-d0d8-430e-a5e7-588d3c75d7ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1216.343094] env[61978]: DEBUG oslo_concurrency.lockutils [req-be15c63d-d12c-4c3a-93b3-cd10e96d6d68 req-2a46c756-9255-4ed4-b339-055546f3f110 service nova] Lock "e9b70b36-d0d8-430e-a5e7-588d3c75d7ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1216.343281] env[61978]: DEBUG nova.compute.manager [req-be15c63d-d12c-4c3a-93b3-cd10e96d6d68 req-2a46c756-9255-4ed4-b339-055546f3f110 service nova] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] No waiting events found dispatching network-vif-plugged-5db0518b-5552-40e5-80e3-e15e330660eb {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1216.343473] env[61978]: WARNING nova.compute.manager [req-be15c63d-d12c-4c3a-93b3-cd10e96d6d68 req-2a46c756-9255-4ed4-b339-055546f3f110 service nova] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Received unexpected event network-vif-plugged-5db0518b-5552-40e5-80e3-e15e330660eb for instance with vm_state building and task_state spawning. [ 1216.351133] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52773246-7bed-4223-a9ed-eab90737c996, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.419875] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395705, 'name': MoveVirtualDisk_Task} progress is 80%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.434939] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395708, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.665713} completed successfully.
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.435289] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] 90a38dba-0dae-455a-8d02-44c2bb098fb5/90a38dba-0dae-455a-8d02-44c2bb098fb5.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1216.435521] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1216.435789] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8e43c43-c8b7-4794-b69c-a5634849fcd7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.444874] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1216.444874] env[61978]: value = "task-1395713" [ 1216.444874] env[61978]: _type = "Task" [ 1216.444874] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.455695] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395713, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.652812] env[61978]: DEBUG oslo_vmware.api [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395712, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.826769] env[61978]: DEBUG oslo_concurrency.lockutils [req-c6cb420e-dfd8-4f5a-ab52-79be69290c2e req-3302cd73-7074-4fc6-a0b0-66bb0183ee5b service nova] Releasing lock "refresh_cache-b76dd94e-c14b-48d4-bb7f-020313412ca2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1216.837356] env[61978]: INFO nova.scheduler.client.report [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Deleted allocation for migration baada567-1832-4740-9bdd-43a3e81f2aa6 [ 1216.852942] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52773246-7bed-4223-a9ed-eab90737c996, 'name': SearchDatastore_Task, 'duration_secs': 0.013715} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.853807] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5679990f-3a6e-4257-b3a8-c146d73a7a36 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.859250] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1216.859250] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]523151c5-7387-3c97-dc0e-4127dfd8f6a9" [ 1216.859250] env[61978]: _type = "Task" [ 1216.859250] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.866934] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523151c5-7387-3c97-dc0e-4127dfd8f6a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.883893] env[61978]: DEBUG nova.network.neutron [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Successfully updated port: 5db0518b-5552-40e5-80e3-e15e330660eb {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1216.917493] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395705, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.494299} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.919426] env[61978]: INFO nova.virt.vmwareapi.ds_util [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_484023df-8e16-41f0-9afc-2a7fed2ca270/OSTACK_IMG_484023df-8e16-41f0-9afc-2a7fed2ca270.vmdk to [datastore2] devstack-image-cache_base/443a8916-4f98-4cb9-9e27-49dd792e901d/443a8916-4f98-4cb9-9e27-49dd792e901d.vmdk. 
[ 1216.919426] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Cleaning up location [datastore2] OSTACK_IMG_484023df-8e16-41f0-9afc-2a7fed2ca270 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1216.919426] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_484023df-8e16-41f0-9afc-2a7fed2ca270 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1216.919426] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a30bd0ab-250f-45ce-b325-cda85fdc0989 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.924195] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1216.924195] env[61978]: value = "task-1395714" [ 1216.924195] env[61978]: _type = "Task" [ 1216.924195] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.931449] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395714, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.952750] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395713, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071838} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.952997] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1216.953726] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f281ea-521b-4520-97cc-0243d2e367ae {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.974942] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 90a38dba-0dae-455a-8d02-44c2bb098fb5/90a38dba-0dae-455a-8d02-44c2bb098fb5.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1216.975197] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a167b452-70cc-4e90-b134-5ab16e5858a9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.993052] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1216.993052] env[61978]: value = "task-1395715" [ 1216.993052] env[61978]: _type = "Task" [ 1216.993052] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.000419] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395715, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.076485] env[61978]: DEBUG nova.objects.instance [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lazy-loading 'flavor' on Instance uuid d3c82821-0617-4de6-8109-813a67910ed1 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1217.153826] env[61978]: DEBUG oslo_vmware.api [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395712, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.773656} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.154150] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1217.154371] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1217.154574] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1217.154799] env[61978]: INFO nova.compute.manager [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Took 1.69 seconds to destroy the instance on the hypervisor. [ 1217.155095] env[61978]: DEBUG oslo.service.loopingcall [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1217.155306] env[61978]: DEBUG nova.compute.manager [-] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1217.155448] env[61978]: DEBUG nova.network.neutron [-] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1217.346942] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 17.672s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1217.373141] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523151c5-7387-3c97-dc0e-4127dfd8f6a9, 'name': SearchDatastore_Task, 'duration_secs': 0.011388} completed successfully.
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.376159] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1217.376424] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] b76dd94e-c14b-48d4-bb7f-020313412ca2/b76dd94e-c14b-48d4-bb7f-020313412ca2.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1217.377285] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6a8f2c2b-5b9b-4ca5-b5cb-14564da2948c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.384614] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1217.384614] env[61978]: value = "task-1395716" [ 1217.384614] env[61978]: _type = "Task" [ 1217.384614] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.389632] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "refresh_cache-e9b70b36-d0d8-430e-a5e7-588d3c75d7ff" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1217.389632] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquired lock "refresh_cache-e9b70b36-d0d8-430e-a5e7-588d3c75d7ff" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1217.389632] env[61978]: DEBUG nova.network.neutron [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1217.398321] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395716, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.437118] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395714, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.035081} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.437118] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1217.437118] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Releasing lock "[datastore2] devstack-image-cache_base/443a8916-4f98-4cb9-9e27-49dd792e901d/443a8916-4f98-4cb9-9e27-49dd792e901d.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1217.437118] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/443a8916-4f98-4cb9-9e27-49dd792e901d/443a8916-4f98-4cb9-9e27-49dd792e901d.vmdk to [datastore2] 17c56c1c-9992-4559-ad23-c68909ae6792/17c56c1c-9992-4559-ad23-c68909ae6792.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1217.437118] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f31d20a-dd15-4396-9014-4aa995df96a8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.445726] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1217.445726] env[61978]: value = "task-1395717" [ 1217.445726] env[61978]: _type = "Task" [ 1217.445726] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.453730] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395717, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.505486] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395715, 'name': ReconfigVM_Task, 'duration_secs': 0.266825} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.505486] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 90a38dba-0dae-455a-8d02-44c2bb098fb5/90a38dba-0dae-455a-8d02-44c2bb098fb5.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1217.506040] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a5dce012-663c-4742-853e-4a1d17a7c4cd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.512981] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1217.512981] env[61978]: value = "task-1395718" [ 1217.512981] env[61978]: _type = "Task" [ 1217.512981] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.521695] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395718, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.570020] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab2be85-57b6-4b07-98d4-114cfc507332 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.575614] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793d2a1d-5ae6-44d9-ac17-d0a10eba29b6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.581812] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6e0088b5-0122-4fc4-910d-63aaf3fd1b0e tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "d3c82821-0617-4de6-8109-813a67910ed1" "released" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: held 8.356s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1217.609856] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace37f2f-d9c7-4a09-b071-2e6ecb12708a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.621248] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f0345b-1f09-4de2-a557-1ba7e4c8e5c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.640903] env[61978]: DEBUG nova.compute.provider_tree [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Inventory has not changed in ProviderTree for 
provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1217.847441] env[61978]: DEBUG nova.compute.manager [req-5c2e18e2-71e4-478c-904a-5296e2d53347 req-e2b86655-0aac-40b0-8d3f-414b200f0e58 service nova] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Received event network-changed-5db0518b-5552-40e5-80e3-e15e330660eb {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1217.847697] env[61978]: DEBUG nova.compute.manager [req-5c2e18e2-71e4-478c-904a-5296e2d53347 req-e2b86655-0aac-40b0-8d3f-414b200f0e58 service nova] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Refreshing instance network info cache due to event network-changed-5db0518b-5552-40e5-80e3-e15e330660eb. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1217.847918] env[61978]: DEBUG oslo_concurrency.lockutils [req-5c2e18e2-71e4-478c-904a-5296e2d53347 req-e2b86655-0aac-40b0-8d3f-414b200f0e58 service nova] Acquiring lock "refresh_cache-e9b70b36-d0d8-430e-a5e7-588d3c75d7ff" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1217.899063] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395716, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.934232] env[61978]: DEBUG nova.network.neutron [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1217.954032] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395717, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.966720] env[61978]: DEBUG nova.network.neutron [-] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1218.022746] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395718, 'name': Rename_Task, 'duration_secs': 0.202709} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.023272] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1218.023552] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ce5fc97-bb45-4673-9c0c-42b48f0ad0ee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.030549] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1218.030549] env[61978]: value = "task-1395719" [ 1218.030549] env[61978]: _type = "Task" [ 1218.030549] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.039931] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395719, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.144414] env[61978]: DEBUG nova.scheduler.client.report [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1218.402020] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395716, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.563183} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.402020] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] b76dd94e-c14b-48d4-bb7f-020313412ca2/b76dd94e-c14b-48d4-bb7f-020313412ca2.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1218.402020] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1218.402020] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b04ae439-2fba-4257-aa4b-7b0e33d4f502 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.406837] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1218.406837] env[61978]: value = "task-1395720" [ 1218.406837] env[61978]: _type = "Task" [ 1218.406837] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.415931] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395720, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.420731] env[61978]: DEBUG nova.compute.manager [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Stashing vm_state: active {{(pid=61978) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1218.430981] env[61978]: DEBUG nova.network.neutron [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Updating instance_info_cache with network_info: [{"id": "5db0518b-5552-40e5-80e3-e15e330660eb", "address": "fa:16:3e:f9:3f:e9", "network": {"id": "5132b4be-ba71-4558-b493-ca23c53670af", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-2050667042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b43df2aa7c044e0fad5c8f01741dacde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5db0518b-55", "ovs_interfaceid": "5db0518b-5552-40e5-80e3-e15e330660eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1218.458242] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395717, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.470499] env[61978]: INFO nova.compute.manager [-] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Took 1.31 seconds to deallocate network for instance. [ 1218.541420] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395719, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.649876] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.379s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1218.650492] env[61978]: DEBUG nova.compute.manager [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1218.653457] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.432s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1218.653676] env[61978]: DEBUG nova.objects.instance [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lazy-loading 'resources' on Instance uuid c0be687a-7444-4019-8b12-dac41a7c080e {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1218.733769] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1218.734119] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1218.734469] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1218.734713] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1218.734951] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1218.737347] env[61978]: INFO nova.compute.manager [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Terminating instance [ 1218.739358] env[61978]: DEBUG nova.compute.manager [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1218.739570] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1218.740434] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f9729d-7ec2-48ee-bc88-cfd9b219d5fe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.750971] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1218.751342] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-129b4cf3-e941-4252-9a77-069b0d837e32 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.757876] env[61978]: DEBUG oslo_vmware.api [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1218.757876] env[61978]: value = "task-1395721" [ 1218.757876] env[61978]: _type = "Task" [ 1218.757876] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.766138] env[61978]: DEBUG oslo_vmware.api [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395721, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.916237] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395720, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.128382} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.916605] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1218.917513] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776bd98b-7ef4-4f10-98bf-b1571fd9fabe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.943920] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] b76dd94e-c14b-48d4-bb7f-020313412ca2/b76dd94e-c14b-48d4-bb7f-020313412ca2.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1218.944834] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Releasing lock "refresh_cache-e9b70b36-d0d8-430e-a5e7-588d3c75d7ff" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1218.945331] env[61978]: DEBUG nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Instance network_info: |[{"id": "5db0518b-5552-40e5-80e3-e15e330660eb", "address": "fa:16:3e:f9:3f:e9", "network": {"id": "5132b4be-ba71-4558-b493-ca23c53670af", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-2050667042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b43df2aa7c044e0fad5c8f01741dacde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5db0518b-55", "ovs_interfaceid": "5db0518b-5552-40e5-80e3-e15e330660eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1218.945476] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6ac959c-04c0-4b55-9225-e4048315e33c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.961315] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1218.961649] env[61978]: DEBUG oslo_concurrency.lockutils [req-5c2e18e2-71e4-478c-904a-5296e2d53347 req-e2b86655-0aac-40b0-8d3f-414b200f0e58 service nova] Acquired lock "refresh_cache-e9b70b36-d0d8-430e-a5e7-588d3c75d7ff" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1218.961838] env[61978]: DEBUG nova.network.neutron [req-5c2e18e2-71e4-478c-904a-5296e2d53347 req-e2b86655-0aac-40b0-8d3f-414b200f0e58 service nova] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Refreshing network info cache for port 5db0518b-5552-40e5-80e3-e15e330660eb {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1218.963118] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:3f:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5db0518b-5552-40e5-80e3-e15e330660eb', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1218.970550] env[61978]: DEBUG oslo.service.loopingcall [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1218.974239] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1218.978329] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c737fbc6-05cd-493e-8dad-8da419186f2a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.993469] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1218.996181] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1218.996181] env[61978]: value = "task-1395722" [ 1218.996181] env[61978]: _type = "Task" [ 1218.996181] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.007074] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1219.007074] env[61978]: value = "task-1395723" [ 1219.007074] env[61978]: _type = "Task" [ 1219.007074] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.007074] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395717, 'name': CopyVirtualDisk_Task} progress is 60%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.011497] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395722, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.017050] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395723, 'name': CreateVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.040386] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395719, 'name': PowerOnVM_Task, 'duration_secs': 0.705107} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.042961] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1219.043234] env[61978]: INFO nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Took 8.01 seconds to spawn the instance on the hypervisor. [ 1219.043465] env[61978]: DEBUG nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1219.044328] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca60528-4807-4033-abaa-20a24da8880c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.157203] env[61978]: DEBUG nova.compute.utils [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1219.159649] env[61978]: DEBUG nova.compute.manager [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1219.159649] env[61978]: DEBUG nova.network.neutron [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1219.222814] env[61978]: DEBUG nova.policy [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '92ae4322e38147639a5b152b79b97143', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6968cd62406944afad1081b2558d4949', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1219.270364] env[61978]: DEBUG oslo_vmware.api [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395721, 'name': PowerOffVM_Task, 'duration_secs': 0.372503} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.270897] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1219.270897] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1219.271149] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0dc40805-935b-4e7e-8155-b369327a37e8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.283667] env[61978]: DEBUG nova.network.neutron [req-5c2e18e2-71e4-478c-904a-5296e2d53347 req-e2b86655-0aac-40b0-8d3f-414b200f0e58 service nova] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Updated VIF entry in instance network info cache for port 5db0518b-5552-40e5-80e3-e15e330660eb. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1219.284084] env[61978]: DEBUG nova.network.neutron [req-5c2e18e2-71e4-478c-904a-5296e2d53347 req-e2b86655-0aac-40b0-8d3f-414b200f0e58 service nova] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Updating instance_info_cache with network_info: [{"id": "5db0518b-5552-40e5-80e3-e15e330660eb", "address": "fa:16:3e:f9:3f:e9", "network": {"id": "5132b4be-ba71-4558-b493-ca23c53670af", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-2050667042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b43df2aa7c044e0fad5c8f01741dacde", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5db0518b-55", "ovs_interfaceid": "5db0518b-5552-40e5-80e3-e15e330660eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1219.340393] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1219.340879] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] 
[instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1219.341013] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Deleting the datastore file [datastore2] b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1219.342277] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f9b6679e-5f78-4cf7-8409-f78f84a75e27 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.354957] env[61978]: DEBUG oslo_vmware.api [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for the task: (returnval){ [ 1219.354957] env[61978]: value = "task-1395725" [ 1219.354957] env[61978]: _type = "Task" [ 1219.354957] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.367114] env[61978]: DEBUG oslo_vmware.api [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395725, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.482316] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395717, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.503615] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add90a45-d524-46ec-ba7a-79dd5aa3fdba {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.509940] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395722, 'name': ReconfigVM_Task, 'duration_secs': 0.367164} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.513622] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Reconfigured VM instance instance-00000059 to attach disk [datastore1] b76dd94e-c14b-48d4-bb7f-020313412ca2/b76dd94e-c14b-48d4-bb7f-020313412ca2.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1219.514424] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ca6c2bef-1407-465c-8106-be66976f4461 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.519419] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286f61bd-8976-41d2-881c-f11ed9dc66a8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.527972] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395723, 'name': CreateVM_Task, 'duration_secs': 0.487904} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.528313] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1219.528313] env[61978]: value = "task-1395726" [ 1219.528313] env[61978]: _type = "Task" [ 1219.528313] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.529187] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1219.529708] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1219.529823] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1219.530421] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1219.560559] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff7d3c02-d097-450e-9124-e8991af6f60b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.573484] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5fdb335-c758-478b-9536-768323157830 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.573484] env[61978]: INFO nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Took 18.08 seconds to build instance. [ 1219.578646] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395726, 'name': Rename_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.579833] env[61978]: DEBUG nova.network.neutron [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Successfully created port: 47785e4d-5976-42da-b954-01d1e5ec6d75 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1219.584216] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1219.584216] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5279c39d-86c8-e958-da20-1c695277266b" [ 1219.584216] env[61978]: _type = "Task" [ 1219.584216] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.589936] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994c40c2-0ad1-4549-8f80-deafc5b242e0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.599172] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5279c39d-86c8-e958-da20-1c695277266b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.608554] env[61978]: DEBUG nova.compute.provider_tree [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1219.665882] env[61978]: DEBUG nova.compute.manager [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1219.787500] env[61978]: DEBUG oslo_concurrency.lockutils [req-5c2e18e2-71e4-478c-904a-5296e2d53347 req-e2b86655-0aac-40b0-8d3f-414b200f0e58 service nova] Releasing lock "refresh_cache-e9b70b36-d0d8-430e-a5e7-588d3c75d7ff" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1219.788064] env[61978]: DEBUG nova.compute.manager [req-5c2e18e2-71e4-478c-904a-5296e2d53347 req-e2b86655-0aac-40b0-8d3f-414b200f0e58 service nova] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Received event network-vif-deleted-c97a7df3-acef-43d7-9e3b-2117f142a29d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1219.788412] env[61978]: INFO nova.compute.manager [req-5c2e18e2-71e4-478c-904a-5296e2d53347 req-e2b86655-0aac-40b0-8d3f-414b200f0e58 service nova] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Neutron deleted interface c97a7df3-acef-43d7-9e3b-2117f142a29d; detaching it from the instance and deleting it from the info cache [ 1219.788769] env[61978]: DEBUG nova.network.neutron [req-5c2e18e2-71e4-478c-904a-5296e2d53347 req-e2b86655-0aac-40b0-8d3f-414b200f0e58 service nova] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1219.864819] env[61978]: DEBUG oslo_vmware.api [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395725, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.983491] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395717, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.495082} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.983844] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/443a8916-4f98-4cb9-9e27-49dd792e901d/443a8916-4f98-4cb9-9e27-49dd792e901d.vmdk to [datastore2] 17c56c1c-9992-4559-ad23-c68909ae6792/17c56c1c-9992-4559-ad23-c68909ae6792.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1219.984678] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e0cfed7-de76-4ea9-934f-ce5e28a0eba0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.007303] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] 17c56c1c-9992-4559-ad23-c68909ae6792/17c56c1c-9992-4559-ad23-c68909ae6792.vmdk or device None with type streamOptimized {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1220.007610] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdc67bb4-1287-43b7-9628-1b81c3d37288 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.026616] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1220.026616] env[61978]: value = "task-1395727" [ 1220.026616] env[61978]: _type = "Task" [ 1220.026616] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.037233] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395727, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.040239] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395726, 'name': Rename_Task, 'duration_secs': 0.142945} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.040502] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1220.040748] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2e474156-b422-4ef4-ac33-ff29613175c5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.046361] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1220.046361] env[61978]: value = "task-1395728" [ 1220.046361] env[61978]: _type = "Task" [ 1220.046361] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.053518] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395728, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.079646] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "90a38dba-0dae-455a-8d02-44c2bb098fb5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 19.593s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1220.094746] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5279c39d-86c8-e958-da20-1c695277266b, 'name': SearchDatastore_Task, 'duration_secs': 0.026035} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.095059] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1220.095304] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1220.095547] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1220.095699] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.095879] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1220.096157] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-35d02fe1-8f2b-47e8-bb4f-298d6e47f566 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.103643] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1220.103819] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1220.104548] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72f72c87-a548-4dc2-8419-f2c9ecbe94f2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.109230] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1220.109230] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c5364c-eb00-a6f5-7478-5bcf72ef41bf" [ 1220.109230] env[61978]: _type = "Task" [ 1220.109230] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.112469] env[61978]: DEBUG nova.scheduler.client.report [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1220.120956] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c5364c-eb00-a6f5-7478-5bcf72ef41bf, 'name': SearchDatastore_Task, 'duration_secs': 0.009194} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.121706] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbc2df8c-a762-4884-8cd3-725412253429 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.126278] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1220.126278] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52354709-1197-53b9-d711-451378cfbcae" [ 1220.126278] env[61978]: _type = "Task" [ 1220.126278] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.134157] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52354709-1197-53b9-d711-451378cfbcae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.292205] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1f49b06e-99b2-49c9-9b1d-fe1baf6f495f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.301266] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7299217a-3e45-4603-9d22-4e3d71b8cb75 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.332570] env[61978]: DEBUG nova.compute.manager [req-5c2e18e2-71e4-478c-904a-5296e2d53347 req-e2b86655-0aac-40b0-8d3f-414b200f0e58 service nova] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Detach interface failed, port_id=c97a7df3-acef-43d7-9e3b-2117f142a29d, reason: Instance 97e128f9-7135-46b0-b22a-ee5449ba48b6 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1220.364681] env[61978]: DEBUG oslo_vmware.api [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Task: {'id': task-1395725, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.676187} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.364929] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1220.365137] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1220.365328] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1220.365517] env[61978]: INFO nova.compute.manager [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1220.365768] env[61978]: DEBUG oslo.service.loopingcall [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1220.365969] env[61978]: DEBUG nova.compute.manager [-] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1220.366077] env[61978]: DEBUG nova.network.neutron [-] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1220.536839] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395727, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.556291] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395728, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.617724] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.964s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1220.619919] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.215s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1220.620195] env[61978]: DEBUG nova.objects.instance [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Lazy-loading 'resources' on Instance uuid 1eae10e8-58b1-435d-86fc-0674725ce6cd {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1220.638322] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52354709-1197-53b9-d711-451378cfbcae, 'name': SearchDatastore_Task, 'duration_secs': 0.00857} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.638820] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1220.639142] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] e9b70b36-d0d8-430e-a5e7-588d3c75d7ff/e9b70b36-d0d8-430e-a5e7-588d3c75d7ff.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1220.639312] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3bcbc883-513e-4f47-bc45-548bb5a1f381 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.643900] env[61978]: INFO nova.scheduler.client.report [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Deleted allocations for instance c0be687a-7444-4019-8b12-dac41a7c080e [ 1220.647283] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1220.647283] env[61978]: value = "task-1395729" [ 1220.647283] env[61978]: _type = "Task" [ 1220.647283] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.660381] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395729, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.676094] env[61978]: DEBUG nova.compute.manager [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1220.704258] env[61978]: DEBUG nova.virt.hardware [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1220.704690] env[61978]: DEBUG nova.virt.hardware [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1220.704890] env[61978]: DEBUG nova.virt.hardware [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1220.705137] env[61978]: DEBUG nova.virt.hardware [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1220.705333] env[61978]: DEBUG nova.virt.hardware [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1220.705501] env[61978]: DEBUG nova.virt.hardware [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1220.705721] env[61978]: DEBUG nova.virt.hardware [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1220.705913] env[61978]: DEBUG nova.virt.hardware [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1220.706128] env[61978]: DEBUG nova.virt.hardware [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1220.706770] env[61978]: DEBUG nova.virt.hardware [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1220.706770] env[61978]: DEBUG nova.virt.hardware [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1220.707612] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b83ea7-6491-4169-a4fc-56edd9bb39e4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.716744] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc7e716-a700-4ba5-8b6c-6f89c3fde42f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.753739] env[61978]: DEBUG nova.compute.manager [req-d35e5334-401e-4b94-a254-45ed1d0737a4 req-41cbf370-e22e-4461-baf1-e0be4bedaab7 service nova] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Received event network-vif-deleted-2efcc135-18f4-45d3-9408-817cdbada770 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1220.753739] env[61978]: INFO nova.compute.manager [req-d35e5334-401e-4b94-a254-45ed1d0737a4 req-41cbf370-e22e-4461-baf1-e0be4bedaab7 service nova] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Neutron deleted interface 2efcc135-18f4-45d3-9408-817cdbada770; detaching it from the instance and deleting it from the info cache [ 1220.753739] env[61978]: DEBUG nova.network.neutron [req-d35e5334-401e-4b94-a254-45ed1d0737a4 req-41cbf370-e22e-4461-baf1-e0be4bedaab7 service nova] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.040912] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395727, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.060764] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395728, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.080834] env[61978]: DEBUG nova.network.neutron [-] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.159980] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd28b06e-c4ac-4ab1-8803-8c55493010b3 tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "c0be687a-7444-4019-8b12-dac41a7c080e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.230s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1221.165200] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395729, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.180971] env[61978]: DEBUG nova.network.neutron [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Successfully updated port: 47785e4d-5976-42da-b954-01d1e5ec6d75 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1221.255619] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a8a1416-e772-460a-b330-68859cfcdb98 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.267074] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4724c668-3eb5-487d-aa1f-60db8c3c9dc9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.305944] env[61978]: DEBUG nova.compute.manager [req-d35e5334-401e-4b94-a254-45ed1d0737a4 req-41cbf370-e22e-4461-baf1-e0be4bedaab7 service nova] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Detach interface failed, port_id=2efcc135-18f4-45d3-9408-817cdbada770, reason: Instance b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2 could not be found. 
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1221.326776] env[61978]: DEBUG nova.compute.manager [req-2632cd69-4660-428e-8a17-92fcda2040a3 req-8e739cfd-9ffa-4cae-93e2-6d921b609ed4 service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Received event network-vif-plugged-47785e4d-5976-42da-b954-01d1e5ec6d75 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1221.327109] env[61978]: DEBUG oslo_concurrency.lockutils [req-2632cd69-4660-428e-8a17-92fcda2040a3 req-8e739cfd-9ffa-4cae-93e2-6d921b609ed4 service nova] Acquiring lock "27713bbd-1234-44ae-8520-78d85baaae12-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1221.328108] env[61978]: DEBUG oslo_concurrency.lockutils [req-2632cd69-4660-428e-8a17-92fcda2040a3 req-8e739cfd-9ffa-4cae-93e2-6d921b609ed4 service nova] Lock "27713bbd-1234-44ae-8520-78d85baaae12-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.328567] env[61978]: DEBUG oslo_concurrency.lockutils [req-2632cd69-4660-428e-8a17-92fcda2040a3 req-8e739cfd-9ffa-4cae-93e2-6d921b609ed4 service nova] Lock "27713bbd-1234-44ae-8520-78d85baaae12-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1221.328839] env[61978]: DEBUG nova.compute.manager [req-2632cd69-4660-428e-8a17-92fcda2040a3 req-8e739cfd-9ffa-4cae-93e2-6d921b609ed4 service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] No waiting events found dispatching network-vif-plugged-47785e4d-5976-42da-b954-01d1e5ec6d75 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1221.329074] env[61978]: WARNING nova.compute.manager [req-2632cd69-4660-428e-8a17-92fcda2040a3 req-8e739cfd-9ffa-4cae-93e2-6d921b609ed4 service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Received unexpected event network-vif-plugged-47785e4d-5976-42da-b954-01d1e5ec6d75 for instance with vm_state building and task_state spawning. 
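The Acquiring/acquired/released lock lines running through these entries (the per-instance "-events" locks above, the "compute_resources" lock held by the resource tracker) are emitted by oslo.concurrency's synchronized wrapper, which records how long the caller waited for and then held the named lock. A minimal illustrative sketch, assuming only the stock lockutils.synchronized decorator — the lock name and function body here are placeholders, not Nova source:

import logging
from oslo_concurrency import lockutils

# Stdlib logging at DEBUG level is enough to surface the lockutils messages.
logging.basicConfig(level=logging.DEBUG)

@lockutils.synchronized("compute_resources")
def update_usage():
    # Critical section: the wrapper times this block and, on exit, logs a
    # 'Lock "compute_resources" "released" by "...update_usage" :: held N.NNNs'
    # line of the same shape as those in the log above.
    pass

update_usage()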
[ 1221.430415] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-885933cd-e361-46d1-8ecf-c82878ec948e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.438707] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d95b7d5-5e66-4899-934c-522c0186a3b8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.471561] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d33f3a1-a2c7-41a9-978a-f7fd972b3461 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.480704] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd8f3f1-d855-4cbe-9c2b-ee98d057deec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.495025] env[61978]: DEBUG nova.compute.provider_tree [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1221.503242] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1221.503454] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1221.538552] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395727, 'name': ReconfigVM_Task, 'duration_secs': 1.159726} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.538872] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Reconfigured VM instance instance-00000043 to attach disk [datastore2] 17c56c1c-9992-4559-ad23-c68909ae6792/17c56c1c-9992-4559-ad23-c68909ae6792.vmdk or device None with type streamOptimized {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1221.539509] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f934cc8-cb4a-4ba4-80d2-9e446e8feaa2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.545441] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1221.545441] env[61978]: value = "task-1395730" [ 1221.545441] env[61978]: _type = "Task" [ 1221.545441] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.555242] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395730, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.559769] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395728, 'name': PowerOnVM_Task, 'duration_secs': 1.105957} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.560018] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1221.560234] env[61978]: INFO nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Took 8.17 seconds to spawn the instance on the hypervisor. 
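The task entries above (ReconfigVM_Task, Rename_Task, PowerOnVM_Task, each with a "Waiting for the task ... to complete" line followed by progress percentages and a final duration) follow oslo.vmware's invoke_api()/wait_for_task() pattern: a vSphere *_Task method is invoked through the authenticated session and the returned task object is polled until it succeeds or fails. A minimal sketch with placeholder endpoint, credentials, and VM reference (none taken from this deployment):

from oslo_vmware import api, vim_util

# Placeholder vCenter endpoint and credentials; constructing the session logs in.
session = api.VMwareAPISession(
    "vcenter.example.org", "demo-user", "demo-password",
    api_retry_count=10, task_poll_interval=0.5)

# Placeholder managed object reference for an existing VM.
vm_ref = vim_util.get_moref("vm-123", "VirtualMachine")

# Start the asynchronous power-on task, then block while wait_for_task() polls it;
# that polling is what produces the "progress is N%" and "completed successfully"
# lines seen in the log.
task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
session.wait_for_task(task)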
[ 1221.560420] env[61978]: DEBUG nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1221.561142] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367c5bfc-20b8-41db-8925-20290e4c5027 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.585060] env[61978]: INFO nova.compute.manager [-] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Took 1.22 seconds to deallocate network for instance. [ 1221.661955] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395729, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.751713} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.662298] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] e9b70b36-d0d8-430e-a5e7-588d3c75d7ff/e9b70b36-d0d8-430e-a5e7-588d3c75d7ff.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1221.662594] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1221.662868] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-70ef9c9d-2785-453a-ac28-b3e9437df455 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.668936] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1221.668936] env[61978]: value = "task-1395731" [ 1221.668936] env[61978]: _type = "Task" [ 1221.668936] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.678030] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395731, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.687664] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Acquiring lock "refresh_cache-27713bbd-1234-44ae-8520-78d85baaae12" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1221.687812] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Acquired lock "refresh_cache-27713bbd-1234-44ae-8520-78d85baaae12" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1221.687952] env[61978]: DEBUG nova.network.neutron [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1221.951073] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "03b08977-4b20-4bac-b48b-06ba5df4e579" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1221.951316] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "03b08977-4b20-4bac-b48b-06ba5df4e579" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.998611] env[61978]: DEBUG nova.scheduler.client.report [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1222.010395] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1222.010555] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 
1222.056228] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395730, 'name': Rename_Task, 'duration_secs': 0.158377} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.056716] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1222.057696] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-73930287-5174-4fa7-9159-38e5efeb2787 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.063445] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1222.063445] env[61978]: value = "task-1395732" [ 1222.063445] env[61978]: _type = "Task" [ 1222.063445] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.075551] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395732, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.077461] env[61978]: INFO nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Took 20.54 seconds to build instance. [ 1222.092421] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1222.178344] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395731, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066087} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.178694] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1222.179496] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b5565dc-ae62-4bc3-b2a7-77eb62049745 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.204315] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] e9b70b36-d0d8-430e-a5e7-588d3c75d7ff/e9b70b36-d0d8-430e-a5e7-588d3c75d7ff.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1222.204612] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e6b14b9-c2b8-4281-bc6a-9778c00fc2bd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.223727] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1222.223727] env[61978]: value = "task-1395733" [ 1222.223727] env[61978]: _type = "Task" [ 1222.223727] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.231727] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395733, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.247332] env[61978]: DEBUG nova.network.neutron [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1222.374425] env[61978]: DEBUG nova.network.neutron [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Updating instance_info_cache with network_info: [{"id": "47785e4d-5976-42da-b954-01d1e5ec6d75", "address": "fa:16:3e:06:c3:3b", "network": {"id": "b5590b99-32cd-425d-be74-a3ce197f3da6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1851734399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6968cd62406944afad1081b2558d4949", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ad8894f-e240-4013-8272-4e79daea0751", "external-id": "nsx-vlan-transportzone-204", "segmentation_id": 204, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47785e4d-59", "ovs_interfaceid": "47785e4d-5976-42da-b954-01d1e5ec6d75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1222.453698] env[61978]: DEBUG nova.compute.manager [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1222.503975] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.884s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1222.506354] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.779s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1222.506591] env[61978]: DEBUG nova.objects.instance [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61978) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1222.545732] env[61978]: INFO nova.scheduler.client.report [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Deleted allocations for instance 1eae10e8-58b1-435d-86fc-0674725ce6cd [ 1222.573342] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395732, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.580907] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "b76dd94e-c14b-48d4-bb7f-020313412ca2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.060s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1222.734621] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395733, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.878053] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Releasing lock "refresh_cache-27713bbd-1234-44ae-8520-78d85baaae12" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1222.878053] env[61978]: DEBUG nova.compute.manager [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Instance network_info: |[{"id": "47785e4d-5976-42da-b954-01d1e5ec6d75", "address": "fa:16:3e:06:c3:3b", "network": {"id": "b5590b99-32cd-425d-be74-a3ce197f3da6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1851734399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6968cd62406944afad1081b2558d4949", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ad8894f-e240-4013-8272-4e79daea0751", "external-id": "nsx-vlan-transportzone-204", "segmentation_id": 204, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47785e4d-59", "ovs_interfaceid": "47785e4d-5976-42da-b954-01d1e5ec6d75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1222.878053] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:c3:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ad8894f-e240-4013-8272-4e79daea0751', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47785e4d-5976-42da-b954-01d1e5ec6d75', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1222.885846] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Creating folder: Project (6968cd62406944afad1081b2558d4949). Parent ref: group-v295764. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1222.886233] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2f7514ca-7ca4-43e5-97a2-10a01f504cf9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.898121] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Created folder: Project (6968cd62406944afad1081b2558d4949) in parent group-v295764. [ 1222.898338] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Creating folder: Instances. Parent ref: group-v296011. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1222.898587] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9358460-0da8-4989-b07e-6344c0440d7c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.908923] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Created folder: Instances in parent group-v296011. [ 1222.909787] env[61978]: DEBUG oslo.service.loopingcall [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1222.909787] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1222.909787] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87052767-3182-432b-9869-30045beacf72 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.932043] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1222.932043] env[61978]: value = "task-1395736" [ 1222.932043] env[61978]: _type = "Task" [ 1222.932043] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.939833] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395736, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.972448] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1223.053100] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cd7dc0d6-9037-4c34-88b4-30e9414c471e tempest-AttachInterfacesUnderV243Test-1010671676 tempest-AttachInterfacesUnderV243Test-1010671676-project-member] Lock "1eae10e8-58b1-435d-86fc-0674725ce6cd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.873s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1223.074262] env[61978]: DEBUG oslo_vmware.api [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395732, 'name': PowerOnVM_Task, 'duration_secs': 0.827919} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.074559] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1223.179102] env[61978]: DEBUG nova.compute.manager [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1223.180181] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b75d38a-e8a1-4deb-8627-e70a2e0da428 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.235598] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395733, 'name': ReconfigVM_Task, 'duration_secs': 0.586574} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.235886] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Reconfigured VM instance instance-0000005a to attach disk [datastore1] e9b70b36-d0d8-430e-a5e7-588d3c75d7ff/e9b70b36-d0d8-430e-a5e7-588d3c75d7ff.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1223.236547] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-24d78e63-4a91-473e-8410-01f7facc05b1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.242552] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1223.242552] env[61978]: value = "task-1395737" [ 1223.242552] env[61978]: _type = "Task" [ 1223.242552] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.250472] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395737, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.392898] env[61978]: DEBUG nova.compute.manager [req-cf0436bb-1e15-411e-8da5-ddcc7efe459b req-db7c69be-0ed6-4cc3-9d84-db111906e78b service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Received event network-changed-47785e4d-5976-42da-b954-01d1e5ec6d75 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1223.393216] env[61978]: DEBUG nova.compute.manager [req-cf0436bb-1e15-411e-8da5-ddcc7efe459b req-db7c69be-0ed6-4cc3-9d84-db111906e78b service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Refreshing instance network info cache due to event network-changed-47785e4d-5976-42da-b954-01d1e5ec6d75. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1223.393441] env[61978]: DEBUG oslo_concurrency.lockutils [req-cf0436bb-1e15-411e-8da5-ddcc7efe459b req-db7c69be-0ed6-4cc3-9d84-db111906e78b service nova] Acquiring lock "refresh_cache-27713bbd-1234-44ae-8520-78d85baaae12" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1223.393708] env[61978]: DEBUG oslo_concurrency.lockutils [req-cf0436bb-1e15-411e-8da5-ddcc7efe459b req-db7c69be-0ed6-4cc3-9d84-db111906e78b service nova] Acquired lock "refresh_cache-27713bbd-1234-44ae-8520-78d85baaae12" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1223.393779] env[61978]: DEBUG nova.network.neutron [req-cf0436bb-1e15-411e-8da5-ddcc7efe459b req-db7c69be-0ed6-4cc3-9d84-db111906e78b service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Refreshing network info cache for port 47785e4d-5976-42da-b954-01d1e5ec6d75 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1223.441682] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395736, 'name': CreateVM_Task, 'duration_secs': 0.477881} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.441935] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1223.442655] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1223.442837] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1223.443185] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1223.443451] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0dad8623-f457-49b5-8769-ab25c56dcf6e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.447816] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Waiting for the task: (returnval){ [ 1223.447816] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b5f3d9-c401-ef4b-208f-3f00bef79901" [ 1223.447816] env[61978]: _type = "Task" [ 1223.447816] env[61978]: } to 
complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.456113] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b5f3d9-c401-ef4b-208f-3f00bef79901, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.519857] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b3f464d6-1aa6-4833-a7a4-30fe448788c1 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1223.520903] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.392s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1223.521732] env[61978]: DEBUG nova.objects.instance [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lazy-loading 'resources' on Instance uuid bdfdd685-e440-4f53-b6c4-2ee2f06acba8 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1223.697473] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4e2de175-6bce-45cb-a1eb-127de7b35c31 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "17c56c1c-9992-4559-ad23-c68909ae6792" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 23.804s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1223.752461] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395737, 'name': Rename_Task, 'duration_secs': 0.167387} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.752751] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1223.753012] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4980681f-dd94-421a-a28a-f7ecd741155e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.758952] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1223.758952] env[61978]: value = "task-1395738" [ 1223.758952] env[61978]: _type = "Task" [ 1223.758952] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.767871] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395738, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.957838] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b5f3d9-c401-ef4b-208f-3f00bef79901, 'name': SearchDatastore_Task, 'duration_secs': 0.009525} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.958181] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1223.958435] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1223.958705] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1223.958861] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1223.959101] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1223.959369] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3058cacc-7988-4fb3-b9d3-95549362ee23 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.967861] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1223.968053] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1223.968773] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-771d31dd-8c2a-4951-b7c8-01bae5eba3f2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.973415] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Waiting for the task: (returnval){ [ 1223.973415] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d2bfcc-98f8-4391-e0c3-d9f256eb74b8" [ 1223.973415] env[61978]: _type = "Task" [ 1223.973415] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.980712] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d2bfcc-98f8-4391-e0c3-d9f256eb74b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.219217] env[61978]: DEBUG nova.network.neutron [req-cf0436bb-1e15-411e-8da5-ddcc7efe459b req-db7c69be-0ed6-4cc3-9d84-db111906e78b service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Updated VIF entry in instance network info cache for port 47785e4d-5976-42da-b954-01d1e5ec6d75. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1224.219399] env[61978]: DEBUG nova.network.neutron [req-cf0436bb-1e15-411e-8da5-ddcc7efe459b req-db7c69be-0ed6-4cc3-9d84-db111906e78b service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Updating instance_info_cache with network_info: [{"id": "47785e4d-5976-42da-b954-01d1e5ec6d75", "address": "fa:16:3e:06:c3:3b", "network": {"id": "b5590b99-32cd-425d-be74-a3ce197f3da6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1851734399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6968cd62406944afad1081b2558d4949", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ad8894f-e240-4013-8272-4e79daea0751", "external-id": "nsx-vlan-transportzone-204", "segmentation_id": 204, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47785e4d-59", "ovs_interfaceid": "47785e4d-5976-42da-b954-01d1e5ec6d75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.272830] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395738, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.298393] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1513d3-48d4-400a-a77a-d2ad0dcdad52 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.305360] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a3a8665-71c6-45f0-8977-3612438d20b9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.335784] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69d1080-f54b-48b2-a3bb-e332a05c1b77 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.345960] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a48eaf-2886-45eb-bd88-034851074aeb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.358728] env[61978]: DEBUG nova.compute.provider_tree [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1224.486538] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d2bfcc-98f8-4391-e0c3-d9f256eb74b8, 'name': SearchDatastore_Task, 'duration_secs': 0.0085} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.487322] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72824932-fddf-4d40-b448-f3e58e22c01c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.492665] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Waiting for the task: (returnval){ [ 1224.492665] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52af0000-4411-c927-17ed-99b565bf1397" [ 1224.492665] env[61978]: _type = "Task" [ 1224.492665] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.500945] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52af0000-4411-c927-17ed-99b565bf1397, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.724471] env[61978]: DEBUG oslo_concurrency.lockutils [req-cf0436bb-1e15-411e-8da5-ddcc7efe459b req-db7c69be-0ed6-4cc3-9d84-db111906e78b service nova] Releasing lock "refresh_cache-27713bbd-1234-44ae-8520-78d85baaae12" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1224.774104] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395738, 'name': PowerOnVM_Task} progress is 78%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.861922] env[61978]: DEBUG nova.scheduler.client.report [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1225.008228] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52af0000-4411-c927-17ed-99b565bf1397, 'name': SearchDatastore_Task, 'duration_secs': 0.009734} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.008413] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1225.008809] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 27713bbd-1234-44ae-8520-78d85baaae12/27713bbd-1234-44ae-8520-78d85baaae12.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1225.008971] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b852b5a1-433b-4b95-83fc-b5982274f4f7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.016014] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Waiting for the task: (returnval){ [ 1225.016014] env[61978]: value = "task-1395739" [ 1225.016014] env[61978]: _type = "Task" [ 1225.016014] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.025548] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395739, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.275420] env[61978]: DEBUG oslo_vmware.api [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395738, 'name': PowerOnVM_Task, 'duration_secs': 1.380842} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.275665] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1225.276331] env[61978]: INFO nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Took 9.47 seconds to spawn the instance on the hypervisor. 
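The entries above are the tail of an ordinary spawn: the cached image vmdk is copied into the instance directory (CopyVirtualDisk_Task), the root disk is extended, the VM is reconfigured to attach the disk, renamed, and finally powered on, with each step issued as a vCenter task and polled until completion ("Waiting for the task ... to complete", then periodic "progress is N%" lines). The sketch below shows that invoke-and-poll pattern using oslo.vmware directly; it is an illustration, not the driver's code, and the host, credentials and managed-object value are placeholders rather than values taken from this log.

```python
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Minimal sketch of the invoke-and-poll pattern visible above; connection
# details and the VM managed-object reference are illustrative placeholders.
session = vmware_api.VMwareAPISession(
    host='vcenter.example.org',
    server_username='admin',
    server_password='secret',
    api_retry_count=10,
    task_poll_interval=0.5)

# Managed-object reference for the VM (placeholder value).
vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')

# invoke_api() issues the SOAP call; vCenter returns a Task reference.
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# wait_for_task() polls the task until it succeeds or fails, producing the
# "Task: {...} progress is N%" style updates seen in the log.
task_info = session.wait_for_task(task_ref)
print(task_info.state)
```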
[ 1225.276331] env[61978]: DEBUG nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1225.277226] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7689b431-6ac7-47f5-8fc5-050eac29856e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.366987] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.846s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1225.369736] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.999s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1225.371514] env[61978]: INFO nova.compute.claims [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1225.398740] env[61978]: INFO nova.scheduler.client.report [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Deleted allocations for instance bdfdd685-e440-4f53-b6c4-2ee2f06acba8 [ 1225.507430] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16165493-99d1-42fc-80ca-e574ada0b997 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.515273] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-df99d50d-8e85-4da3-9b01-8ca36c3a61f5 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Suspending the VM {{(pid=61978) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1225.515649] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-d22f48da-41c7-46c5-a2f7-843a484eacaa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.527009] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395739, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509217} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.528331] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 27713bbd-1234-44ae-8520-78d85baaae12/27713bbd-1234-44ae-8520-78d85baaae12.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1225.528560] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1225.528910] env[61978]: DEBUG oslo_vmware.api [None req-df99d50d-8e85-4da3-9b01-8ca36c3a61f5 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1225.528910] env[61978]: value = "task-1395740" [ 1225.528910] env[61978]: _type = "Task" [ 1225.528910] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.530112] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c70d291c-969b-4f22-a76d-58af7690d4c5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.541518] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Waiting for the task: (returnval){ [ 1225.541518] env[61978]: value = "task-1395741" [ 1225.541518] env[61978]: _type = "Task" [ 1225.541518] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.544682] env[61978]: DEBUG oslo_vmware.api [None req-df99d50d-8e85-4da3-9b01-8ca36c3a61f5 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395740, 'name': SuspendVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.553442] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395741, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.800167] env[61978]: INFO nova.compute.manager [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Took 24.23 seconds to build instance. 
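Interleaved with the vCenter traffic are the oslo.concurrency lock lines ("Acquiring lock ... acquired ... waited Ns ... released ... held Ns") that serialize the resource tracker and per-instance operations; nearby, "compute_resources" is released by update_usage after being held 1.846s while instance_claim reports waiting 18.999s for its turn. Below is a minimal sketch of that named-lock pattern, assuming only that oslo.concurrency is installed; the lock name is taken from the log, but the decorated function and its body are placeholders, not Nova's actual wiring.

```python
from oslo_concurrency import lockutils

# Sketch of the named-lock pattern behind the "acquired/released ...
# waited/held Ns" lines; prefix and body are illustrative.
synchronized = lockutils.synchronized_with_prefix('nova-')


@synchronized('compute_resources')
def update_usage():
    # Runs while the process-local "compute_resources" lock is held;
    # concurrent callers block, which is what the waited/held timings
    # in the log measure.
    pass


# The same lock can also be taken explicitly as a context manager.
with lockutils.lock('compute_resources', lock_file_prefix='nova-'):
    pass
```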
[ 1225.905184] env[61978]: DEBUG oslo_concurrency.lockutils [None req-eff6a743-b372-4573-8d40-84f5406d5090 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "bdfdd685-e440-4f53-b6c4-2ee2f06acba8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.248s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.049413] env[61978]: DEBUG oslo_vmware.api [None req-df99d50d-8e85-4da3-9b01-8ca36c3a61f5 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395740, 'name': SuspendVM_Task} progress is 75%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.061202] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395741, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066261} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.061499] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1226.062321] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1090af-2e97-4d79-b506-b7c8906f1345 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.088218] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 27713bbd-1234-44ae-8520-78d85baaae12/27713bbd-1234-44ae-8520-78d85baaae12.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1226.088543] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee39e902-c558-45d0-9970-cc6df0335993 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.107809] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Waiting for the task: (returnval){ [ 1226.107809] env[61978]: value = "task-1395742" [ 1226.107809] env[61978]: _type = "Task" [ 1226.107809] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.115609] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395742, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.140890] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1226.141178] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1226.141391] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1226.141588] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1226.141765] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.144071] env[61978]: INFO nova.compute.manager [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Terminating instance [ 1226.145973] env[61978]: DEBUG nova.compute.manager [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1226.146197] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1226.147049] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9451296e-5d32-4b3d-85be-3d72cac74e3d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.153684] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1226.153923] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-63f15905-5d8f-40c9-ada0-67155b9ddf49 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.159663] env[61978]: DEBUG oslo_vmware.api [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1226.159663] env[61978]: value = "task-1395743" [ 1226.159663] env[61978]: _type = "Task" [ 1226.159663] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.166783] env[61978]: DEBUG oslo_vmware.api [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395743, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.302912] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e16c60c6-e2f8-49bb-b736-fc8cea7ba0e7 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "e9b70b36-d0d8-430e-a5e7-588d3c75d7ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.741s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.544819] env[61978]: DEBUG oslo_vmware.api [None req-df99d50d-8e85-4da3-9b01-8ca36c3a61f5 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395740, 'name': SuspendVM_Task} progress is 75%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.618546] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395742, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.644381] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0afe25-1fc1-4dcd-92db-e2069d35a0c0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.653201] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e91670c-3b89-4f66-b57e-5b9598d7c411 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.689808] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb50bdbd-8484-49ab-a9d9-adc690ab0197 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.698148] env[61978]: DEBUG oslo_vmware.api [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395743, 'name': PowerOffVM_Task, 'duration_secs': 0.384736} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.700437] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1226.700656] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1226.700998] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-506c1932-d2bd-4793-9ca7-5205d2a120d3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.703615] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a4c99f6-156b-4c73-b866-9a313336f649 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.719235] env[61978]: DEBUG nova.compute.provider_tree [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1226.769670] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1226.769909] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 
tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1226.770116] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Deleting the datastore file [datastore2] 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1226.770393] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9cdb8ff5-4c4f-4375-a493-9df1f23d662c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.777878] env[61978]: DEBUG oslo_vmware.api [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for the task: (returnval){ [ 1226.777878] env[61978]: value = "task-1395745" [ 1226.777878] env[61978]: _type = "Task" [ 1226.777878] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.786158] env[61978]: DEBUG oslo_vmware.api [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395745, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.043707] env[61978]: DEBUG oslo_vmware.api [None req-df99d50d-8e85-4da3-9b01-8ca36c3a61f5 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395740, 'name': SuspendVM_Task, 'duration_secs': 1.067769} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.043994] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-df99d50d-8e85-4da3-9b01-8ca36c3a61f5 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Suspended the VM {{(pid=61978) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1227.044203] env[61978]: DEBUG nova.compute.manager [None req-df99d50d-8e85-4da3-9b01-8ca36c3a61f5 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1227.044977] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ed953b-4d9c-4516-a6bc-b9b980df3c36 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.068913] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1918a979-4fbc-491e-ac72-b58f0404889b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "interface-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.069512] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1918a979-4fbc-491e-ac72-b58f0404889b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "interface-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.069512] env[61978]: DEBUG nova.objects.instance [None req-1918a979-4fbc-491e-ac72-b58f0404889b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lazy-loading 'flavor' on Instance uuid 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1227.119388] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395742, 'name': ReconfigVM_Task, 'duration_secs': 0.739057} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.119695] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 27713bbd-1234-44ae-8520-78d85baaae12/27713bbd-1234-44ae-8520-78d85baaae12.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1227.120450] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eca4d093-8cd3-437d-b894-fcc519327e65 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.127031] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Waiting for the task: (returnval){ [ 1227.127031] env[61978]: value = "task-1395746" [ 1227.127031] env[61978]: _type = "Task" [ 1227.127031] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.135298] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395746, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.224438] env[61978]: DEBUG nova.scheduler.client.report [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1227.288771] env[61978]: DEBUG oslo_vmware.api [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Task: {'id': task-1395745, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138835} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.288771] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1227.288771] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1227.288771] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1227.289044] env[61978]: INFO nova.compute.manager [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1227.289137] env[61978]: DEBUG oslo.service.loopingcall [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1227.289325] env[61978]: DEBUG nova.compute.manager [-] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1227.289422] env[61978]: DEBUG nova.network.neutron [-] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1227.574030] env[61978]: DEBUG nova.objects.instance [None req-1918a979-4fbc-491e-ac72-b58f0404889b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lazy-loading 'pci_requests' on Instance uuid 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1227.637684] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395746, 'name': Rename_Task, 'duration_secs': 0.432736} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.638091] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1227.638393] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0512e24e-60ac-4ec1-9d87-1384d74d5567 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.645384] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Waiting for the task: (returnval){ [ 1227.645384] env[61978]: value = "task-1395747" [ 1227.645384] env[61978]: _type = "Task" [ 1227.645384] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.653098] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395747, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.676432] env[61978]: DEBUG nova.compute.manager [req-ba340f2c-f630-4ffb-8564-364c1ab0819f req-7a35a832-ec5f-4f42-9759-e4764393d201 service nova] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Received event network-vif-deleted-461cf97f-d4c1-4a04-bc0f-ea10c52ecce3 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1227.676679] env[61978]: INFO nova.compute.manager [req-ba340f2c-f630-4ffb-8564-364c1ab0819f req-7a35a832-ec5f-4f42-9759-e4764393d201 service nova] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Neutron deleted interface 461cf97f-d4c1-4a04-bc0f-ea10c52ecce3; detaching it from the instance and deleting it from the info cache [ 1227.676885] env[61978]: DEBUG nova.network.neutron [req-ba340f2c-f630-4ffb-8564-364c1ab0819f req-7a35a832-ec5f-4f42-9759-e4764393d201 service nova] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1227.728973] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.359s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.729527] env[61978]: DEBUG nova.compute.manager [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1227.732040] env[61978]: DEBUG oslo_concurrency.lockutils [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.479s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.733671] env[61978]: INFO nova.compute.claims [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1227.809850] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "90a38dba-0dae-455a-8d02-44c2bb098fb5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.810157] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "90a38dba-0dae-455a-8d02-44c2bb098fb5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.810385] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "90a38dba-0dae-455a-8d02-44c2bb098fb5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.810604] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "90a38dba-0dae-455a-8d02-44c2bb098fb5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.810806] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "90a38dba-0dae-455a-8d02-44c2bb098fb5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.814268] env[61978]: INFO nova.compute.manager [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Terminating instance [ 1227.819118] env[61978]: DEBUG nova.compute.manager [None 
req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1227.819398] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1227.820653] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de8f1bb-b4ef-4bc8-86d6-be20c981cb65 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.829051] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1227.829396] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e07bc309-67a8-4f73-b86a-40a5e2017e35 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.835984] env[61978]: DEBUG oslo_vmware.api [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1227.835984] env[61978]: value = "task-1395748" [ 1227.835984] env[61978]: _type = "Task" [ 1227.835984] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.844672] env[61978]: DEBUG oslo_vmware.api [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395748, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.079616] env[61978]: DEBUG nova.objects.base [None req-1918a979-4fbc-491e-ac72-b58f0404889b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Object Instance<758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9> lazy-loaded attributes: flavor,pci_requests {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1228.079616] env[61978]: DEBUG nova.network.neutron [None req-1918a979-4fbc-491e-ac72-b58f0404889b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1228.088451] env[61978]: DEBUG nova.network.neutron [-] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.155996] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395747, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.180343] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2e6dfaeb-22ca-4951-ab09-7ab2288e39e1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.189171] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450765bc-6bfc-452d-90ed-0fbd3d830387 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.220790] env[61978]: DEBUG nova.compute.manager [req-ba340f2c-f630-4ffb-8564-364c1ab0819f req-7a35a832-ec5f-4f42-9759-e4764393d201 service nova] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Detach interface failed, port_id=461cf97f-d4c1-4a04-bc0f-ea10c52ecce3, reason: Instance 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7 could not be found. 
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1228.228241] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1918a979-4fbc-491e-ac72-b58f0404889b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "interface-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.159s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1228.230032] env[61978]: INFO nova.compute.manager [None req-49423639-287e-4431-85ae-968570d7d097 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Resuming [ 1228.230350] env[61978]: DEBUG nova.objects.instance [None req-49423639-287e-4431-85ae-968570d7d097 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lazy-loading 'flavor' on Instance uuid 17c56c1c-9992-4559-ad23-c68909ae6792 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1228.240722] env[61978]: DEBUG nova.compute.utils [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1228.246481] env[61978]: DEBUG nova.compute.manager [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1228.246481] env[61978]: DEBUG nova.network.neutron [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1228.304511] env[61978]: DEBUG nova.policy [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ab697d6ab4e4ece8b290afbf5ec1366', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a33ac41ae0247b59c400c6ed9145239', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1228.345732] env[61978]: DEBUG oslo_vmware.api [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395748, 'name': PowerOffVM_Task, 'duration_secs': 0.226451} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.346020] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1228.346206] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1228.346485] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9426c747-7cbe-47dc-8b85-326ca31321b5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.595555] env[61978]: INFO nova.compute.manager [-] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Took 1.31 seconds to deallocate network for instance. [ 1228.657817] env[61978]: DEBUG oslo_vmware.api [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395747, 'name': PowerOnVM_Task, 'duration_secs': 0.519005} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.658131] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1228.658353] env[61978]: INFO nova.compute.manager [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Took 7.98 seconds to spawn the instance on the hypervisor. [ 1228.658542] env[61978]: DEBUG nova.compute.manager [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1228.659313] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c27dbd-9e7a-4f5a-9543-5e8a0dacf9f7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.745854] env[61978]: DEBUG nova.compute.manager [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1228.776436] env[61978]: DEBUG nova.network.neutron [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Successfully created port: cb02d7c2-d091-4929-a5bd-80c484b81de0 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1229.103214] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.175799] env[61978]: INFO nova.compute.manager [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Took 25.45 seconds to build instance. [ 1229.245248] env[61978]: DEBUG oslo_concurrency.lockutils [None req-49423639-287e-4431-85ae-968570d7d097 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "refresh_cache-17c56c1c-9992-4559-ad23-c68909ae6792" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1229.245770] env[61978]: DEBUG oslo_concurrency.lockutils [None req-49423639-287e-4431-85ae-968570d7d097 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquired lock "refresh_cache-17c56c1c-9992-4559-ad23-c68909ae6792" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1229.246101] env[61978]: DEBUG nova.network.neutron [None req-49423639-287e-4431-85ae-968570d7d097 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1229.263175] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c942bf72-01ad-4d04-bceb-e4c2216466b6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.270663] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c1465b-dd29-42e7-9dd2-50514e19b11f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.304053] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4114bab3-f65c-4047-be88-b56545c5871f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.312825] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a01170e-a635-422d-b04a-a94b7f78475e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.332943] env[61978]: DEBUG nova.compute.provider_tree [None req-387220d4-2fc6-44a6-ae7a-754b3294829e 
tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1229.386877] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1229.387132] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1229.387516] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Deleting the datastore file [datastore1] 90a38dba-0dae-455a-8d02-44c2bb098fb5 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1229.387638] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a6f49af-1116-44ca-acd6-fb6e056df99b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.393584] env[61978]: DEBUG oslo_vmware.api [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1229.393584] env[61978]: value = "task-1395750" [ 1229.393584] env[61978]: _type = "Task" [ 1229.393584] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.404295] env[61978]: DEBUG oslo_vmware.api [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395750, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.678430] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32b250c-fad4-410e-9e22-d224ccc32444 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Lock "27713bbd-1234-44ae-8520-78d85baaae12" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.961s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.760225] env[61978]: DEBUG nova.compute.manager [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1229.796125] env[61978]: DEBUG nova.virt.hardware [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1229.796125] env[61978]: DEBUG nova.virt.hardware [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1229.796125] env[61978]: DEBUG nova.virt.hardware [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1229.796125] env[61978]: DEBUG nova.virt.hardware [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1229.796125] env[61978]: DEBUG nova.virt.hardware [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1229.796840] env[61978]: DEBUG nova.virt.hardware [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1229.797267] env[61978]: DEBUG nova.virt.hardware [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1229.797612] env[61978]: DEBUG nova.virt.hardware [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1229.797950] env[61978]: DEBUG 
nova.virt.hardware [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1229.798286] env[61978]: DEBUG nova.virt.hardware [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1229.798633] env[61978]: DEBUG nova.virt.hardware [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1229.801076] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c60e6e0-739c-4fc6-a6c5-4ede296d3f98 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.813107] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf35fd6c-fe24-4e22-9fb2-dca13be6a81e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.833577] env[61978]: DEBUG nova.scheduler.client.report [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1229.903457] env[61978]: DEBUG oslo_vmware.api [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395750, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184247} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.907073] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1229.907073] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1229.907073] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1229.907073] env[61978]: INFO nova.compute.manager [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Took 2.09 seconds to destroy the instance on the hypervisor. [ 1229.907073] env[61978]: DEBUG oslo.service.loopingcall [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1229.907073] env[61978]: DEBUG nova.compute.manager [-] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1229.907073] env[61978]: DEBUG nova.network.neutron [-] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1229.983159] env[61978]: INFO nova.compute.manager [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Rescuing [ 1229.984486] env[61978]: DEBUG oslo_concurrency.lockutils [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Acquiring lock "refresh_cache-27713bbd-1234-44ae-8520-78d85baaae12" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1229.986321] env[61978]: DEBUG oslo_concurrency.lockutils [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Acquired lock "refresh_cache-27713bbd-1234-44ae-8520-78d85baaae12" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1229.986321] env[61978]: DEBUG nova.network.neutron [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1230.084414] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. 
{{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1230.084414] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.084414] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.084414] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.084414] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.084414] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.084414] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.084414] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1230.084414] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.284111] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "interface-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.284571] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "interface-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.284811] env[61978]: DEBUG nova.objects.instance [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lazy-loading 'flavor' on Instance uuid 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1230.339076] env[61978]: DEBUG oslo_concurrency.lockutils [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.607s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.339621] env[61978]: DEBUG nova.compute.manager [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1230.343624] env[61978]: DEBUG oslo_concurrency.lockutils [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.416s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.344666] env[61978]: DEBUG nova.objects.instance [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lazy-loading 'resources' on Instance uuid 68791dff-12e0-499d-8835-1e9173af570f {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1230.399163] env[61978]: DEBUG nova.network.neutron [None req-49423639-287e-4431-85ae-968570d7d097 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Updating instance_info_cache with network_info: [{"id": "bba3eeec-259f-4ea3-b0f6-e509a29d33f4", "address": "fa:16:3e:1e:db:c1", "network": {"id": "aa63ec3f-fde3-49f2-ab12-71ae85601428", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-445619089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "198eab494c0a4e0eb83bae5732df9c78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "650f7968-4522-4ba5-8304-1b9949951ed7", "external-id": "nsx-vlan-transportzone-568", "segmentation_id": 568, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbba3eeec-25", "ovs_interfaceid": "bba3eeec-259f-4ea3-b0f6-e509a29d33f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1230.509837] env[61978]: DEBUG nova.network.neutron [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Successfully updated port: cb02d7c2-d091-4929-a5bd-80c484b81de0 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1230.571687] env[61978]: DEBUG nova.compute.manager [req-cc65978a-f391-4e6c-92e8-1865c9100b45 req-d446331c-f2d0-4ecd-bc44-e5c7d24d94b4 service nova] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Received event network-vif-plugged-cb02d7c2-d091-4929-a5bd-80c484b81de0 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1230.572274] env[61978]: DEBUG oslo_concurrency.lockutils [req-cc65978a-f391-4e6c-92e8-1865c9100b45 req-d446331c-f2d0-4ecd-bc44-e5c7d24d94b4 service nova] Acquiring lock "f33d00ec-72b7-43f2-bc0d-320e3219ae47-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.572274] env[61978]: DEBUG oslo_concurrency.lockutils [req-cc65978a-f391-4e6c-92e8-1865c9100b45 req-d446331c-f2d0-4ecd-bc44-e5c7d24d94b4 service nova] Lock "f33d00ec-72b7-43f2-bc0d-320e3219ae47-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.572407] env[61978]: DEBUG oslo_concurrency.lockutils [req-cc65978a-f391-4e6c-92e8-1865c9100b45 req-d446331c-f2d0-4ecd-bc44-e5c7d24d94b4 service nova] Lock "f33d00ec-72b7-43f2-bc0d-320e3219ae47-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.572504] env[61978]: DEBUG nova.compute.manager [req-cc65978a-f391-4e6c-92e8-1865c9100b45 req-d446331c-f2d0-4ecd-bc44-e5c7d24d94b4 service nova] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] No waiting events found dispatching network-vif-plugged-cb02d7c2-d091-4929-a5bd-80c484b81de0 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1230.572639] env[61978]: WARNING nova.compute.manager [req-cc65978a-f391-4e6c-92e8-1865c9100b45 req-d446331c-f2d0-4ecd-bc44-e5c7d24d94b4 service nova] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Received unexpected event network-vif-plugged-cb02d7c2-d091-4929-a5bd-80c484b81de0 for instance with vm_state building and task_state spawning. [ 1230.587495] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.740992] env[61978]: DEBUG nova.network.neutron [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Updating instance_info_cache with network_info: [{"id": "47785e4d-5976-42da-b954-01d1e5ec6d75", "address": "fa:16:3e:06:c3:3b", "network": {"id": "b5590b99-32cd-425d-be74-a3ce197f3da6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1851734399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6968cd62406944afad1081b2558d4949", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ad8894f-e240-4013-8272-4e79daea0751", "external-id": "nsx-vlan-transportzone-204", "segmentation_id": 204, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47785e4d-59", "ovs_interfaceid": "47785e4d-5976-42da-b954-01d1e5ec6d75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1230.848175] env[61978]: 
DEBUG nova.compute.utils [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1230.852294] env[61978]: DEBUG nova.compute.manager [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1230.852456] env[61978]: DEBUG nova.network.neutron [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1230.864743] env[61978]: DEBUG nova.network.neutron [-] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1230.872076] env[61978]: DEBUG nova.objects.instance [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lazy-loading 'pci_requests' on Instance uuid 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1230.902488] env[61978]: DEBUG oslo_concurrency.lockutils [None req-49423639-287e-4431-85ae-968570d7d097 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Releasing lock "refresh_cache-17c56c1c-9992-4559-ad23-c68909ae6792" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1230.906138] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d07dcf1-eca8-4374-bc19-2b72f145d207 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.909346] env[61978]: DEBUG nova.policy [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3f20b272502341bd80be470f98554d1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d95ebcafdca43b8a1636e21c7258803', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1230.917158] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-49423639-287e-4431-85ae-968570d7d097 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Resuming the VM {{(pid=61978) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1230.918642] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b8f75079-d472-40bc-8e3d-f348e2dcca02 {{(pid=61978) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.925750] env[61978]: DEBUG oslo_vmware.api [None req-49423639-287e-4431-85ae-968570d7d097 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1230.925750] env[61978]: value = "task-1395751" [ 1230.925750] env[61978]: _type = "Task" [ 1230.925750] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.933901] env[61978]: DEBUG oslo_vmware.api [None req-49423639-287e-4431-85ae-968570d7d097 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395751, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.016122] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "refresh_cache-f33d00ec-72b7-43f2-bc0d-320e3219ae47" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1231.016685] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired lock "refresh_cache-f33d00ec-72b7-43f2-bc0d-320e3219ae47" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.016767] env[61978]: DEBUG nova.network.neutron [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1231.152631] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-179ab24e-7cc0-4c7f-a6c2-2f6d7b39e5a8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.160514] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce256e25-314c-4122-a540-51a4a7d62796 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.195402] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c393b4c7-d943-4d0f-a6c9-6a553a5cc75f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.199271] env[61978]: DEBUG nova.network.neutron [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Successfully created port: 69d57c29-bde4-4e04-8f75-f8f4e410d10b {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1231.207076] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8e2b6f-b70e-401e-b1fa-07f0b954ceb5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1231.222011] env[61978]: DEBUG nova.compute.provider_tree [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1231.244537] env[61978]: DEBUG oslo_concurrency.lockutils [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Releasing lock "refresh_cache-27713bbd-1234-44ae-8520-78d85baaae12" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1231.352920] env[61978]: DEBUG nova.compute.manager [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1231.373796] env[61978]: INFO nova.compute.manager [-] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Took 1.47 seconds to deallocate network for instance. [ 1231.374449] env[61978]: DEBUG nova.objects.base [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Object Instance<758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9> lazy-loaded attributes: flavor,pci_requests {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1231.374652] env[61978]: DEBUG nova.network.neutron [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1231.435959] env[61978]: DEBUG oslo_vmware.api [None req-49423639-287e-4431-85ae-968570d7d097 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395751, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.437618] env[61978]: DEBUG nova.policy [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '346f982562de48fab1702aca567113ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3a4f29a959447159b2f7d194ea94873', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1231.548255] env[61978]: DEBUG nova.network.neutron [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1231.682413] env[61978]: DEBUG nova.network.neutron [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Updating instance_info_cache with network_info: [{"id": "cb02d7c2-d091-4929-a5bd-80c484b81de0", "address": "fa:16:3e:3e:15:16", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb02d7c2-d0", "ovs_interfaceid": "cb02d7c2-d091-4929-a5bd-80c484b81de0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1231.726263] env[61978]: DEBUG nova.scheduler.client.report [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1231.788738] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1231.789045] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7c81aa02-a3ac-46aa-8bb2-06167fb456dc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.796620] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Waiting for the task: (returnval){ [ 1231.796620] env[61978]: value = "task-1395752" [ 1231.796620] env[61978]: _type = "Task" [ 1231.796620] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.815495] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395752, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.867020] env[61978]: DEBUG nova.network.neutron [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Successfully created port: 45cdb631-5f61-4991-973e-3ba5b5ff0820 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1231.883293] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.887346] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Acquiring lock "f9b57cf4-f2e4-4d2a-9bd4-74952d46876d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.887622] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Lock "f9b57cf4-f2e4-4d2a-9bd4-74952d46876d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1231.936371] env[61978]: DEBUG oslo_vmware.api [None req-49423639-287e-4431-85ae-968570d7d097 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395751, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.184922] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Releasing lock "refresh_cache-f33d00ec-72b7-43f2-bc0d-320e3219ae47" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1232.185154] env[61978]: DEBUG nova.compute.manager [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Instance network_info: |[{"id": "cb02d7c2-d091-4929-a5bd-80c484b81de0", "address": "fa:16:3e:3e:15:16", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb02d7c2-d0", "ovs_interfaceid": "cb02d7c2-d091-4929-a5bd-80c484b81de0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1232.185623] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:15:16', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cb02d7c2-d091-4929-a5bd-80c484b81de0', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1232.194112] env[61978]: DEBUG oslo.service.loopingcall [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1232.194366] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1232.194987] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-58a05886-ac26-4a39-9d81-a376298d66f9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.214074] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1232.214074] env[61978]: value = "task-1395753" [ 1232.214074] env[61978]: _type = "Task" [ 1232.214074] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.221639] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395753, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.229513] env[61978]: DEBUG oslo_concurrency.lockutils [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.887s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1232.231701] env[61978]: DEBUG oslo_concurrency.lockutils [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.018s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1232.231906] env[61978]: DEBUG nova.objects.instance [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61978) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1232.262653] env[61978]: INFO nova.scheduler.client.report [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Deleted allocations for instance 68791dff-12e0-499d-8835-1e9173af570f [ 1232.308987] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395752, 'name': PowerOffVM_Task, 'duration_secs': 0.338815} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.309156] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1232.309906] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc7e18d-a960-495e-bc61-a91d3558690d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.331057] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9104d725-a133-474d-9dbb-150ef0b1ec07 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.363473] env[61978]: DEBUG nova.compute.manager [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1232.366995] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1232.367511] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c984f7c0-e03b-429d-ad6a-aaecf6d3a694 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.374142] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Waiting for the task: (returnval){ [ 1232.374142] env[61978]: value = "task-1395754" [ 1232.374142] env[61978]: _type = "Task" [ 1232.374142] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.382250] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395754, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.387901] env[61978]: DEBUG nova.virt.hardware [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1232.388149] env[61978]: DEBUG nova.virt.hardware [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1232.388311] env[61978]: DEBUG nova.virt.hardware [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1232.388503] env[61978]: DEBUG nova.virt.hardware [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1232.388659] env[61978]: DEBUG nova.virt.hardware [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1232.388815] env[61978]: DEBUG nova.virt.hardware [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1232.389021] env[61978]: DEBUG nova.virt.hardware [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1232.389190] env[61978]: DEBUG nova.virt.hardware [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1232.389360] 
env[61978]: DEBUG nova.virt.hardware [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1232.389528] env[61978]: DEBUG nova.virt.hardware [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1232.389710] env[61978]: DEBUG nova.virt.hardware [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1232.390486] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9384167a-118d-4cbd-817b-d6902b1840d9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.393483] env[61978]: DEBUG nova.compute.manager [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1232.400710] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e069fed-2f43-495b-a42e-65feb70993ca {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.435848] env[61978]: DEBUG oslo_vmware.api [None req-49423639-287e-4431-85ae-968570d7d097 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395751, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.602475] env[61978]: DEBUG nova.compute.manager [req-2782e563-abae-4b57-ad72-4c90c36885ef req-ddc32bb0-bdf2-4202-8f78-2e66bddd17a5 service nova] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Received event network-vif-deleted-2759dd21-49e4-41f6-a462-e567457ab39c {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1232.602632] env[61978]: DEBUG nova.compute.manager [req-2782e563-abae-4b57-ad72-4c90c36885ef req-ddc32bb0-bdf2-4202-8f78-2e66bddd17a5 service nova] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Received event network-changed-cb02d7c2-d091-4929-a5bd-80c484b81de0 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1232.602826] env[61978]: DEBUG nova.compute.manager [req-2782e563-abae-4b57-ad72-4c90c36885ef req-ddc32bb0-bdf2-4202-8f78-2e66bddd17a5 service nova] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Refreshing instance network info cache due to event network-changed-cb02d7c2-d091-4929-a5bd-80c484b81de0. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1232.603018] env[61978]: DEBUG oslo_concurrency.lockutils [req-2782e563-abae-4b57-ad72-4c90c36885ef req-ddc32bb0-bdf2-4202-8f78-2e66bddd17a5 service nova] Acquiring lock "refresh_cache-f33d00ec-72b7-43f2-bc0d-320e3219ae47" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1232.603195] env[61978]: DEBUG oslo_concurrency.lockutils [req-2782e563-abae-4b57-ad72-4c90c36885ef req-ddc32bb0-bdf2-4202-8f78-2e66bddd17a5 service nova] Acquired lock "refresh_cache-f33d00ec-72b7-43f2-bc0d-320e3219ae47" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1232.603414] env[61978]: DEBUG nova.network.neutron [req-2782e563-abae-4b57-ad72-4c90c36885ef req-ddc32bb0-bdf2-4202-8f78-2e66bddd17a5 service nova] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Refreshing network info cache for port cb02d7c2-d091-4929-a5bd-80c484b81de0 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1232.727929] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395753, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.729016] env[61978]: DEBUG nova.network.neutron [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Successfully updated port: 69d57c29-bde4-4e04-8f75-f8f4e410d10b {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1232.770190] env[61978]: DEBUG oslo_concurrency.lockutils [None req-31359506-81f5-4592-b622-dd80a1933cb6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "68791dff-12e0-499d-8835-1e9173af570f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.245s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1232.886891] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] VM already powered off {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1232.887161] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1232.887423] env[61978]: DEBUG oslo_concurrency.lockutils [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1232.887576] env[61978]: DEBUG oslo_concurrency.lockutils [None req-83d73c28-3578-4071-94fd-24b65410345a 
tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1232.887822] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1232.888101] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ddd8caa0-df7b-4332-8c8f-ae8bdfde6ac8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.900425] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1232.900604] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1232.903395] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cce2bff2-85b0-4d9f-b187-b09fe65b9b00 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.910517] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Waiting for the task: (returnval){ [ 1232.910517] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]524ccf5f-aea3-7c33-0121-b933a59ff6be" [ 1232.910517] env[61978]: _type = "Task" [ 1232.910517] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.917988] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]524ccf5f-aea3-7c33-0121-b933a59ff6be, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.919148] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1232.936673] env[61978]: DEBUG oslo_vmware.api [None req-49423639-287e-4431-85ae-968570d7d097 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395751, 'name': PowerOnVM_Task} progress is 81%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.224773] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395753, 'name': CreateVM_Task, 'duration_secs': 0.623469} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.227186] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1233.228023] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1233.228215] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1233.228549] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1233.229214] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a4a540b-dace-4635-a867-d4771fb0109b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.232028] env[61978]: DEBUG oslo_concurrency.lockutils [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "refresh_cache-4d357d46-8bbb-4228-a5a6-2ce67fe037d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1233.232028] env[61978]: DEBUG oslo_concurrency.lockutils [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "refresh_cache-4d357d46-8bbb-4228-a5a6-2ce67fe037d7" {{(pid=61978) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1233.232028] env[61978]: DEBUG nova.network.neutron [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1233.237285] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1233.237285] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]520e9f0e-a879-7506-5ab4-3055983edc66" [ 1233.237285] env[61978]: _type = "Task" [ 1233.237285] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.242720] env[61978]: DEBUG oslo_concurrency.lockutils [None req-196bfbb8-94b0-4d7e-9aef-84b3f5ffedf6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1233.249543] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.057s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1233.249879] env[61978]: DEBUG nova.objects.instance [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lazy-loading 'resources' on Instance uuid a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1233.251498] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520e9f0e-a879-7506-5ab4-3055983edc66, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.367499] env[61978]: DEBUG nova.network.neutron [req-2782e563-abae-4b57-ad72-4c90c36885ef req-ddc32bb0-bdf2-4202-8f78-2e66bddd17a5 service nova] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Updated VIF entry in instance network info cache for port cb02d7c2-d091-4929-a5bd-80c484b81de0. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1233.367898] env[61978]: DEBUG nova.network.neutron [req-2782e563-abae-4b57-ad72-4c90c36885ef req-ddc32bb0-bdf2-4202-8f78-2e66bddd17a5 service nova] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Updating instance_info_cache with network_info: [{"id": "cb02d7c2-d091-4929-a5bd-80c484b81de0", "address": "fa:16:3e:3e:15:16", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb02d7c2-d0", "ovs_interfaceid": "cb02d7c2-d091-4929-a5bd-80c484b81de0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1233.422857] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]524ccf5f-aea3-7c33-0121-b933a59ff6be, 'name': SearchDatastore_Task, 'duration_secs': 0.009713} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.424080] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6305fbdd-aa34-4253-9425-896dcc5a8191 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.433891] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Waiting for the task: (returnval){ [ 1233.433891] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52561ac2-c3de-c8fb-d2a5-3a1e33bfe20d" [ 1233.433891] env[61978]: _type = "Task" [ 1233.433891] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.441288] env[61978]: DEBUG oslo_vmware.api [None req-49423639-287e-4431-85ae-968570d7d097 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395751, 'name': PowerOnVM_Task, 'duration_secs': 2.345982} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.442031] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-49423639-287e-4431-85ae-968570d7d097 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Resumed the VM {{(pid=61978) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1233.442247] env[61978]: DEBUG nova.compute.manager [None req-49423639-287e-4431-85ae-968570d7d097 tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1233.443227] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bdbd186-1fb5-4db8-9bcd-280ebb43db74 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.449502] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52561ac2-c3de-c8fb-d2a5-3a1e33bfe20d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.607029] env[61978]: DEBUG nova.compute.manager [req-373be2e8-2d71-4425-baf0-4ed3ccd97896 req-30f041eb-8642-4ac3-b304-16f3f695cf26 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Received event network-vif-plugged-45cdb631-5f61-4991-973e-3ba5b5ff0820 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1233.607263] env[61978]: DEBUG oslo_concurrency.lockutils [req-373be2e8-2d71-4425-baf0-4ed3ccd97896 req-30f041eb-8642-4ac3-b304-16f3f695cf26 service nova] Acquiring lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1233.607520] env[61978]: DEBUG oslo_concurrency.lockutils [req-373be2e8-2d71-4425-baf0-4ed3ccd97896 req-30f041eb-8642-4ac3-b304-16f3f695cf26 service nova] Lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1233.607727] env[61978]: DEBUG oslo_concurrency.lockutils [req-373be2e8-2d71-4425-baf0-4ed3ccd97896 req-30f041eb-8642-4ac3-b304-16f3f695cf26 service nova] Lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1233.607897] env[61978]: DEBUG nova.compute.manager [req-373be2e8-2d71-4425-baf0-4ed3ccd97896 req-30f041eb-8642-4ac3-b304-16f3f695cf26 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] No waiting events found dispatching network-vif-plugged-45cdb631-5f61-4991-973e-3ba5b5ff0820 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1233.608417] env[61978]: WARNING nova.compute.manager 
[req-373be2e8-2d71-4425-baf0-4ed3ccd97896 req-30f041eb-8642-4ac3-b304-16f3f695cf26 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Received unexpected event network-vif-plugged-45cdb631-5f61-4991-973e-3ba5b5ff0820 for instance with vm_state active and task_state None. [ 1233.619085] env[61978]: DEBUG nova.network.neutron [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Successfully updated port: 45cdb631-5f61-4991-973e-3ba5b5ff0820 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1233.749837] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520e9f0e-a879-7506-5ab4-3055983edc66, 'name': SearchDatastore_Task, 'duration_secs': 0.038007} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.750170] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1233.750411] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1233.750667] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1233.779058] env[61978]: DEBUG nova.network.neutron [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1233.870847] env[61978]: DEBUG oslo_concurrency.lockutils [req-2782e563-abae-4b57-ad72-4c90c36885ef req-ddc32bb0-bdf2-4202-8f78-2e66bddd17a5 service nova] Releasing lock "refresh_cache-f33d00ec-72b7-43f2-bc0d-320e3219ae47" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1233.948481] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52561ac2-c3de-c8fb-d2a5-3a1e33bfe20d, 'name': SearchDatastore_Task, 'duration_secs': 0.017197} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.951498] env[61978]: DEBUG oslo_concurrency.lockutils [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1233.951589] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 27713bbd-1234-44ae-8520-78d85baaae12/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk. {{(pid=61978) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1233.952157] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1233.952426] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1233.953133] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b7d580dc-aa2e-46de-9de1-9cd36dc832d4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.954988] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03886c1c-3113-4296-bed5-59bcb0dd4890 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.965423] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Waiting for the task: (returnval){ [ 1233.965423] env[61978]: value = "task-1395755" [ 1233.965423] env[61978]: _type = "Task" [ 1233.965423] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.969735] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1233.970349] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1233.971061] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0329f428-2655-4eb2-95b4-b661d53dd700 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.976970] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395755, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.978089] env[61978]: DEBUG nova.network.neutron [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Updating instance_info_cache with network_info: [{"id": "69d57c29-bde4-4e04-8f75-f8f4e410d10b", "address": "fa:16:3e:6b:47:c3", "network": {"id": "69a878ea-ec7d-4793-9c48-f0f5d454a6fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1711420523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d95ebcafdca43b8a1636e21c7258803", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69d57c29-bd", "ovs_interfaceid": "69d57c29-bde4-4e04-8f75-f8f4e410d10b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1233.984080] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1233.984080] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b6a491-0f4d-6807-5e90-2749dbca8b40" [ 1233.984080] env[61978]: _type = "Task" [ 1233.984080] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.993110] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b6a491-0f4d-6807-5e90-2749dbca8b40, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.019221] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5574a997-18a0-455f-b5e3-1f3ef3879a91 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.026502] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c52aeb1-2b3c-43ad-8d99-0863a5875a2e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.059941] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8272811-b3a7-4112-9a8a-cea66b6e24f1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.068795] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420eb076-eb3f-4446-b56b-fe37f52cfcf4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.083029] env[61978]: DEBUG nova.compute.provider_tree [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1234.122638] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1234.122638] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1234.122638] env[61978]: DEBUG nova.network.neutron [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1234.476007] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395755, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.483749] env[61978]: DEBUG oslo_concurrency.lockutils [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "refresh_cache-4d357d46-8bbb-4228-a5a6-2ce67fe037d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1234.484152] env[61978]: DEBUG nova.compute.manager [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Instance network_info: |[{"id": "69d57c29-bde4-4e04-8f75-f8f4e410d10b", "address": "fa:16:3e:6b:47:c3", "network": {"id": "69a878ea-ec7d-4793-9c48-f0f5d454a6fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1711420523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d95ebcafdca43b8a1636e21c7258803", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69d57c29-bd", "ovs_interfaceid": "69d57c29-bde4-4e04-8f75-f8f4e410d10b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1234.484657] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:47:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6def6dc5-d564-45ca-8f4f-7c820677e6e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '69d57c29-bde4-4e04-8f75-f8f4e410d10b', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1234.492626] env[61978]: DEBUG oslo.service.loopingcall [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1234.492993] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1234.496300] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-031cdaa9-05cd-490e-9095-4c32105e04ad {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.517069] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b6a491-0f4d-6807-5e90-2749dbca8b40, 'name': SearchDatastore_Task, 'duration_secs': 0.009425} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.519040] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1234.519040] env[61978]: value = "task-1395756" [ 1234.519040] env[61978]: _type = "Task" [ 1234.519040] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.519488] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdfa5f22-ea06-4698-b1c5-133b8f132ad2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.529930] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395756, 'name': CreateVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.530953] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1234.530953] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]528fc0d8-49d9-6767-8d21-1aa62ec50710" [ 1234.530953] env[61978]: _type = "Task" [ 1234.530953] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.539391] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528fc0d8-49d9-6767-8d21-1aa62ec50710, 'name': SearchDatastore_Task, 'duration_secs': 0.009375} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.542248] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1234.542248] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] f33d00ec-72b7-43f2-bc0d-320e3219ae47/f33d00ec-72b7-43f2-bc0d-320e3219ae47.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1234.542248] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-055a91a8-d53f-4aa2-bf93-f046998b751c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.549702] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1234.549702] env[61978]: value = "task-1395757" [ 1234.549702] env[61978]: _type = "Task" [ 1234.549702] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.560380] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395757, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.586869] env[61978]: DEBUG nova.scheduler.client.report [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1234.669106] env[61978]: WARNING nova.network.neutron [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] 3b4d30cc-d9a1-4180-b6eb-881241a1c0f4 already exists in list: networks containing: ['3b4d30cc-d9a1-4180-b6eb-881241a1c0f4']. 
ignoring it [ 1234.951237] env[61978]: DEBUG nova.compute.manager [req-7e538536-09c4-47ef-846d-ddc4351a9ae3 req-dbd95172-6f51-4a23-9c07-ef886c8af824 service nova] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Received event network-vif-plugged-69d57c29-bde4-4e04-8f75-f8f4e410d10b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1234.951588] env[61978]: DEBUG oslo_concurrency.lockutils [req-7e538536-09c4-47ef-846d-ddc4351a9ae3 req-dbd95172-6f51-4a23-9c07-ef886c8af824 service nova] Acquiring lock "4d357d46-8bbb-4228-a5a6-2ce67fe037d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.951793] env[61978]: DEBUG oslo_concurrency.lockutils [req-7e538536-09c4-47ef-846d-ddc4351a9ae3 req-dbd95172-6f51-4a23-9c07-ef886c8af824 service nova] Lock "4d357d46-8bbb-4228-a5a6-2ce67fe037d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.952130] env[61978]: DEBUG oslo_concurrency.lockutils [req-7e538536-09c4-47ef-846d-ddc4351a9ae3 req-dbd95172-6f51-4a23-9c07-ef886c8af824 service nova] Lock "4d357d46-8bbb-4228-a5a6-2ce67fe037d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.952324] env[61978]: DEBUG nova.compute.manager [req-7e538536-09c4-47ef-846d-ddc4351a9ae3 req-dbd95172-6f51-4a23-9c07-ef886c8af824 service nova] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] No waiting events found dispatching network-vif-plugged-69d57c29-bde4-4e04-8f75-f8f4e410d10b {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1234.952523] env[61978]: WARNING nova.compute.manager [req-7e538536-09c4-47ef-846d-ddc4351a9ae3 req-dbd95172-6f51-4a23-9c07-ef886c8af824 service nova] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Received unexpected event network-vif-plugged-69d57c29-bde4-4e04-8f75-f8f4e410d10b for instance with vm_state building and task_state spawning. [ 1234.952695] env[61978]: DEBUG nova.compute.manager [req-7e538536-09c4-47ef-846d-ddc4351a9ae3 req-dbd95172-6f51-4a23-9c07-ef886c8af824 service nova] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Received event network-changed-69d57c29-bde4-4e04-8f75-f8f4e410d10b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1234.952879] env[61978]: DEBUG nova.compute.manager [req-7e538536-09c4-47ef-846d-ddc4351a9ae3 req-dbd95172-6f51-4a23-9c07-ef886c8af824 service nova] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Refreshing instance network info cache due to event network-changed-69d57c29-bde4-4e04-8f75-f8f4e410d10b. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1234.953326] env[61978]: DEBUG oslo_concurrency.lockutils [req-7e538536-09c4-47ef-846d-ddc4351a9ae3 req-dbd95172-6f51-4a23-9c07-ef886c8af824 service nova] Acquiring lock "refresh_cache-4d357d46-8bbb-4228-a5a6-2ce67fe037d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1234.953526] env[61978]: DEBUG oslo_concurrency.lockutils [req-7e538536-09c4-47ef-846d-ddc4351a9ae3 req-dbd95172-6f51-4a23-9c07-ef886c8af824 service nova] Acquired lock "refresh_cache-4d357d46-8bbb-4228-a5a6-2ce67fe037d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1234.953751] env[61978]: DEBUG nova.network.neutron [req-7e538536-09c4-47ef-846d-ddc4351a9ae3 req-dbd95172-6f51-4a23-9c07-ef886c8af824 service nova] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Refreshing network info cache for port 69d57c29-bde4-4e04-8f75-f8f4e410d10b {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1234.979611] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395755, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.584406} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.979873] env[61978]: INFO nova.virt.vmwareapi.ds_util [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 27713bbd-1234-44ae-8520-78d85baaae12/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk. [ 1234.980722] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c92db232-ba64-419f-96b5-689023fef7c5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.008178] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 27713bbd-1234-44ae-8520-78d85baaae12/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1235.008514] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df2e70f0-0963-4c16-953f-5a57e4d2ee3e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.028304] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Waiting for the task: (returnval){ [ 1235.028304] env[61978]: value = "task-1395758" [ 1235.028304] env[61978]: _type = "Task" [ 1235.028304] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.036284] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395756, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.040841] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395758, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.058591] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395757, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.450099} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.059040] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] f33d00ec-72b7-43f2-bc0d-320e3219ae47/f33d00ec-72b7-43f2-bc0d-320e3219ae47.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1235.059681] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1235.059681] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e2058f7b-5521-4641-b869-fdd23a91abd1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.066379] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1235.066379] env[61978]: value = "task-1395759" [ 1235.066379] env[61978]: _type = "Task" [ 1235.066379] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.073998] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395759, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.091662] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.842s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1235.094299] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 16.133s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1235.126762] env[61978]: INFO nova.scheduler.client.report [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Deleted allocations for instance a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6 [ 1235.131670] env[61978]: DEBUG nova.network.neutron [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Updating instance_info_cache with network_info: [{"id": "3c5e24a1-8ef7-45a5-a39a-4ce790adc338", "address": "fa:16:3e:14:c6:36", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c5e24a1-8e", "ovs_interfaceid": "3c5e24a1-8ef7-45a5-a39a-4ce790adc338", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "45cdb631-5f61-4991-973e-3ba5b5ff0820", "address": "fa:16:3e:ef:97:76", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45cdb631-5f", "ovs_interfaceid": "45cdb631-5f61-4991-973e-3ba5b5ff0820", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1235.532439] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395756, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.539787] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395758, 'name': ReconfigVM_Task, 'duration_secs': 0.396654} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.540064] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 27713bbd-1234-44ae-8520-78d85baaae12/4732143d-796a-4a66-9f1e-806f8b0654e0-rescue.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1235.540828] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98752161-2646-489b-86c6-c115a903f086 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.567250] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45c9f2bf-28c8-41bd-bb94-e99e202d1852 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.584447] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395759, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073648} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.585626] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1235.585981] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Waiting for the task: (returnval){ [ 1235.585981] env[61978]: value = "task-1395760" [ 1235.585981] env[61978]: _type = "Task" [ 1235.585981] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.586677] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f09051b-2a9e-4c78-bc09-714d1e7fdc90 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.595813] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395760, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.609393] env[61978]: INFO nova.compute.claims [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1235.621060] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] f33d00ec-72b7-43f2-bc0d-320e3219ae47/f33d00ec-72b7-43f2-bc0d-320e3219ae47.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1235.621600] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-471de264-a85d-4384-abd2-8cc7dbc22e0c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.638283] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1235.638905] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1235.639079] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.639839] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b37c4f-d573-4b5b-8186-3212e780b5a4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.642814] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c3e4f8fd-fca3-4eb6-a40d-9b586136071a tempest-AttachVolumeTestJSON-1446053004 tempest-AttachVolumeTestJSON-1446053004-project-member] Lock "a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.767s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1235.647445] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1235.647445] env[61978]: value = "task-1395761" [ 1235.647445] env[61978]: _type = "Task" [ 1235.647445] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.662131] env[61978]: DEBUG nova.virt.hardware [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1235.662386] env[61978]: DEBUG nova.virt.hardware [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1235.662550] env[61978]: DEBUG nova.virt.hardware [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1235.662741] env[61978]: DEBUG nova.virt.hardware [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1235.662895] env[61978]: DEBUG nova.virt.hardware [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1235.663066] env[61978]: DEBUG nova.virt.hardware [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1235.663281] env[61978]: DEBUG nova.virt.hardware [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1235.663445] env[61978]: DEBUG nova.virt.hardware [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1235.663618] env[61978]: DEBUG nova.virt.hardware [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1235.663785] env[61978]: DEBUG nova.virt.hardware [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1235.663961] env[61978]: DEBUG nova.virt.hardware [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1235.670201] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Reconfiguring VM to attach interface {{(pid=61978) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1235.676225] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab753cc7-0da2-48c6-9354-5aa9a420bd9b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.695139] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395761, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.696551] env[61978]: DEBUG oslo_vmware.api [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1235.696551] env[61978]: value = "task-1395762" [ 1235.696551] env[61978]: _type = "Task" [ 1235.696551] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.704841] env[61978]: DEBUG oslo_vmware.api [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395762, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.767463] env[61978]: DEBUG nova.compute.manager [req-301f98cd-a1bd-45d4-b961-81da3ede7303 req-ab394fdb-58c3-40c9-85a7-ad6d14da16f1 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Received event network-changed-45cdb631-5f61-4991-973e-3ba5b5ff0820 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1235.767693] env[61978]: DEBUG nova.compute.manager [req-301f98cd-a1bd-45d4-b961-81da3ede7303 req-ab394fdb-58c3-40c9-85a7-ad6d14da16f1 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Refreshing instance network info cache due to event network-changed-45cdb631-5f61-4991-973e-3ba5b5ff0820. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1235.767823] env[61978]: DEBUG oslo_concurrency.lockutils [req-301f98cd-a1bd-45d4-b961-81da3ede7303 req-ab394fdb-58c3-40c9-85a7-ad6d14da16f1 service nova] Acquiring lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1235.768009] env[61978]: DEBUG oslo_concurrency.lockutils [req-301f98cd-a1bd-45d4-b961-81da3ede7303 req-ab394fdb-58c3-40c9-85a7-ad6d14da16f1 service nova] Acquired lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.768198] env[61978]: DEBUG nova.network.neutron [req-301f98cd-a1bd-45d4-b961-81da3ede7303 req-ab394fdb-58c3-40c9-85a7-ad6d14da16f1 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Refreshing network info cache for port 45cdb631-5f61-4991-973e-3ba5b5ff0820 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1235.813099] env[61978]: DEBUG nova.network.neutron [req-7e538536-09c4-47ef-846d-ddc4351a9ae3 req-dbd95172-6f51-4a23-9c07-ef886c8af824 service nova] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Updated VIF entry in instance network info cache for port 69d57c29-bde4-4e04-8f75-f8f4e410d10b. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1235.813480] env[61978]: DEBUG nova.network.neutron [req-7e538536-09c4-47ef-846d-ddc4351a9ae3 req-dbd95172-6f51-4a23-9c07-ef886c8af824 service nova] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Updating instance_info_cache with network_info: [{"id": "69d57c29-bde4-4e04-8f75-f8f4e410d10b", "address": "fa:16:3e:6b:47:c3", "network": {"id": "69a878ea-ec7d-4793-9c48-f0f5d454a6fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1711420523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d95ebcafdca43b8a1636e21c7258803", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69d57c29-bd", "ovs_interfaceid": "69d57c29-bde4-4e04-8f75-f8f4e410d10b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.035468] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395756, 'name': CreateVM_Task, 'duration_secs': 1.209262} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.035649] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1236.036390] env[61978]: DEBUG oslo_concurrency.lockutils [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1236.036687] env[61978]: DEBUG oslo_concurrency.lockutils [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.037044] env[61978]: DEBUG oslo_concurrency.lockutils [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1236.037511] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e7c64b9-6533-40f1-99b2-e3a3d7226337 {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.042156] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1236.042156] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]528ce748-a492-f8b1-7019-15bf85fce91d" [ 1236.042156] env[61978]: _type = "Task" [ 1236.042156] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.050741] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528ce748-a492-f8b1-7019-15bf85fce91d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.099527] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395760, 'name': ReconfigVM_Task, 'duration_secs': 0.172741} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.100329] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1236.100329] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f78912d6-8063-4b5d-90d4-8f6eca9783b8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.106457] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Waiting for the task: (returnval){ [ 1236.106457] env[61978]: value = "task-1395763" [ 1236.106457] env[61978]: _type = "Task" [ 1236.106457] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.114378] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395763, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.123249] env[61978]: INFO nova.compute.resource_tracker [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updating resource usage from migration 984cd406-40bb-41ab-8948-b39be4277799 [ 1236.171620] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395761, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.212317] env[61978]: DEBUG oslo_vmware.api [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395762, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.318410] env[61978]: DEBUG oslo_concurrency.lockutils [req-7e538536-09c4-47ef-846d-ddc4351a9ae3 req-dbd95172-6f51-4a23-9c07-ef886c8af824 service nova] Releasing lock "refresh_cache-4d357d46-8bbb-4228-a5a6-2ce67fe037d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1236.381413] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca84bc7-f09f-49ab-9e7d-919e5c9d9f80 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.388763] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c12a09-2051-473c-a1bf-a3315183b604 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.425908] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58fc0dd2-b76c-4407-81a0-d6d828ddd6f0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.433515] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d6513a-a44b-4b4b-94a1-d2f2253de58a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.447488] env[61978]: DEBUG nova.compute.provider_tree [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1236.552975] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528ce748-a492-f8b1-7019-15bf85fce91d, 'name': SearchDatastore_Task, 'duration_secs': 0.009555} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.553354] env[61978]: DEBUG oslo_concurrency.lockutils [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1236.553402] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1236.553634] env[61978]: DEBUG oslo_concurrency.lockutils [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1236.553861] env[61978]: DEBUG oslo_concurrency.lockutils [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.553959] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1236.554235] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a59103c-f35d-4e26-b46c-70ce9e8367df {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.562598] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1236.562903] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1236.563761] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18e4c628-666e-4a86-97b6-b9831a9b2117 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.566786] env[61978]: DEBUG nova.network.neutron [req-301f98cd-a1bd-45d4-b961-81da3ede7303 req-ab394fdb-58c3-40c9-85a7-ad6d14da16f1 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Updated VIF entry in instance network info cache for port 45cdb631-5f61-4991-973e-3ba5b5ff0820. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1236.567198] env[61978]: DEBUG nova.network.neutron [req-301f98cd-a1bd-45d4-b961-81da3ede7303 req-ab394fdb-58c3-40c9-85a7-ad6d14da16f1 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Updating instance_info_cache with network_info: [{"id": "3c5e24a1-8ef7-45a5-a39a-4ce790adc338", "address": "fa:16:3e:14:c6:36", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c5e24a1-8e", "ovs_interfaceid": "3c5e24a1-8ef7-45a5-a39a-4ce790adc338", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "45cdb631-5f61-4991-973e-3ba5b5ff0820", "address": "fa:16:3e:ef:97:76", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45cdb631-5f", "ovs_interfaceid": "45cdb631-5f61-4991-973e-3ba5b5ff0820", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.573050] 
env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1236.573050] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52809a02-cf41-51ec-e8e3-184d1f8bbac6" [ 1236.573050] env[61978]: _type = "Task" [ 1236.573050] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.581629] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52809a02-cf41-51ec-e8e3-184d1f8bbac6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.618662] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395763, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.672165] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395761, 'name': ReconfigVM_Task, 'duration_secs': 0.968995} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.672615] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Reconfigured VM instance instance-0000005c to attach disk [datastore2] f33d00ec-72b7-43f2-bc0d-320e3219ae47/f33d00ec-72b7-43f2-bc0d-320e3219ae47.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1236.673119] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-55b6c626-5670-476d-bdf1-55f527b91d9d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.680027] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1236.680027] env[61978]: value = "task-1395765" [ 1236.680027] env[61978]: _type = "Task" [ 1236.680027] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.689536] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395765, 'name': Rename_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.706681] env[61978]: DEBUG oslo_vmware.api [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395762, 'name': ReconfigVM_Task, 'duration_secs': 0.729055} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.707232] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1236.707467] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Reconfigured VM to attach interface {{(pid=61978) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1236.950420] env[61978]: DEBUG nova.scheduler.client.report [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1237.178813] env[61978]: DEBUG oslo_concurrency.lockutils [req-301f98cd-a1bd-45d4-b961-81da3ede7303 req-ab394fdb-58c3-40c9-85a7-ad6d14da16f1 service nova] Releasing lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1237.178813] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52809a02-cf41-51ec-e8e3-184d1f8bbac6, 'name': SearchDatastore_Task, 'duration_secs': 0.027851} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.178813] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09ee12c9-788e-40a7-a23b-28608df5c89b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.178813] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1237.178813] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5222e7d3-e9d5-df58-004f-5621f03dc523" [ 1237.178813] env[61978]: _type = "Task" [ 1237.178813] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.178813] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5222e7d3-e9d5-df58-004f-5621f03dc523, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.178813] env[61978]: DEBUG oslo_vmware.api [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395763, 'name': PowerOnVM_Task, 'duration_secs': 0.639434} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.178813] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1237.178813] env[61978]: DEBUG nova.compute.manager [None req-83d73c28-3578-4071-94fd-24b65410345a tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1237.178813] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e46a15-cbcb-4e2e-ace1-d698f2af625d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.191643] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395765, 'name': Rename_Task, 'duration_secs': 0.402812} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.191941] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1237.192212] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8a4e13aa-4751-4853-8969-090fc0197845 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.199824] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1237.199824] env[61978]: value = "task-1395766" [ 1237.199824] env[61978]: _type = "Task" [ 1237.199824] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.208229] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395766, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.211411] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d3db485f-a37d-45fe-87ba-3c2c0ab8ea63 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "interface-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.927s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.455314] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.361s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.455525] env[61978]: INFO nova.compute.manager [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Migrating [ 1237.464465] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.471s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1237.464753] env[61978]: DEBUG nova.objects.instance [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lazy-loading 'resources' on Instance uuid 97e128f9-7135-46b0-b22a-ee5449ba48b6 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1237.603223] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5222e7d3-e9d5-df58-004f-5621f03dc523, 'name': SearchDatastore_Task, 'duration_secs': 0.024512} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.605674] env[61978]: DEBUG oslo_concurrency.lockutils [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1237.606030] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 4d357d46-8bbb-4228-a5a6-2ce67fe037d7/4d357d46-8bbb-4228-a5a6-2ce67fe037d7.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1237.606476] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4268de51-17a6-40fa-bb6f-18b749518974 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.613149] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1237.613149] env[61978]: value = "task-1395767" [ 1237.613149] env[61978]: _type = "Task" [ 1237.613149] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.623131] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395767, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.712418] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395766, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.718435] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3038b60b-b6b8-49e1-9e6d-a1c77df96ef9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.725918] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95278d3d-e9c5-475e-997b-094374fd5f30 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.757607] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a65379-b631-4c45-b82e-6d08f6f47819 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.765290] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f037f1ad-07be-4f7d-8251-333a2d872c0a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.779106] env[61978]: DEBUG nova.compute.provider_tree [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1237.982938] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "refresh_cache-d3c82821-0617-4de6-8109-813a67910ed1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1237.982938] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired lock "refresh_cache-d3c82821-0617-4de6-8109-813a67910ed1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.982938] env[61978]: DEBUG nova.network.neutron [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1238.125326] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395767, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.211423] env[61978]: DEBUG oslo_vmware.api [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395766, 'name': PowerOnVM_Task, 'duration_secs': 0.831065} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.211423] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1238.211423] env[61978]: INFO nova.compute.manager [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Took 8.45 seconds to spawn the instance on the hypervisor. [ 1238.211423] env[61978]: DEBUG nova.compute.manager [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1238.211423] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af00f6ff-c9a7-45ca-b926-32033c614155 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.282693] env[61978]: DEBUG nova.scheduler.client.report [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1238.627587] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395767, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.586733} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.627895] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 4d357d46-8bbb-4228-a5a6-2ce67fe037d7/4d357d46-8bbb-4228-a5a6-2ce67fe037d7.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1238.628109] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1238.628412] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2b800060-76ec-43b8-97d7-d4890c5d8a77 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.634965] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1238.634965] env[61978]: value = "task-1395768" [ 1238.634965] env[61978]: _type = "Task" [ 1238.634965] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.644743] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395768, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.729576] env[61978]: INFO nova.compute.manager [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Took 32.38 seconds to build instance. 
[ 1238.787883] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.323s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1238.791161] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.698s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1238.791161] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1238.792617] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.820s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1238.796573] env[61978]: INFO nova.compute.claims [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1238.818120] env[61978]: INFO nova.scheduler.client.report [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Deleted allocations for instance 97e128f9-7135-46b0-b22a-ee5449ba48b6 [ 1238.826422] env[61978]: INFO nova.scheduler.client.report [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Deleted allocations for instance b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2 [ 1239.145866] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395768, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075826} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.150088] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1239.153046] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48783541-e16f-4e13-8a27-c36f8c4c3bc9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.183239] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] 4d357d46-8bbb-4228-a5a6-2ce67fe037d7/4d357d46-8bbb-4228-a5a6-2ce67fe037d7.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1239.183975] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-862a6d0e-a477-436c-9bf6-f44336523d22 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.199176] env[61978]: DEBUG nova.network.neutron [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updating instance_info_cache with network_info: [{"id": "d2d39b09-4acd-4f24-aa07-31e86f78f134", "address": "fa:16:3e:3b:95:21", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2d39b09-4a", "ovs_interfaceid": "d2d39b09-4acd-4f24-aa07-31e86f78f134", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.207587] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1239.207587] env[61978]: value = "task-1395769" [ 1239.207587] env[61978]: _type = "Task" [ 1239.207587] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.216551] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395769, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.231942] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fd14980d-7be3-477c-97e7-58e1472fec34 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "f33d00ec-72b7-43f2-bc0d-320e3219ae47" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.889s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.328377] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6b910b8c-d625-493f-be70-f34e0f88bcb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "97e128f9-7135-46b0-b22a-ee5449ba48b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.873s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.335381] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e13dcc15-2051-4048-a7b5-e4761192ce85 tempest-DeleteServersTestJSON-992411880 tempest-DeleteServersTestJSON-992411880-project-member] Lock "b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.601s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.624025] env[61978]: DEBUG oslo_concurrency.lockutils [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "interface-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-759bb051-5bd1-42fb-896d-ced73e9f67a1" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.624262] env[61978]: DEBUG oslo_concurrency.lockutils [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "interface-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-759bb051-5bd1-42fb-896d-ced73e9f67a1" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1239.624650] env[61978]: DEBUG nova.objects.instance [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lazy-loading 'flavor' on Instance uuid 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1239.627775] env[61978]: DEBUG nova.compute.manager [req-0abaa041-f328-408f-a4c5-e7fd2200a237 req-c8c17e09-3fd5-4ccf-af26-a9bcf2ab8477 service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Received event network-changed-47785e4d-5976-42da-b954-01d1e5ec6d75 {{(pid=61978) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11131}} [ 1239.628058] env[61978]: DEBUG nova.compute.manager [req-0abaa041-f328-408f-a4c5-e7fd2200a237 req-c8c17e09-3fd5-4ccf-af26-a9bcf2ab8477 service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Refreshing instance network info cache due to event network-changed-47785e4d-5976-42da-b954-01d1e5ec6d75. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1239.628210] env[61978]: DEBUG oslo_concurrency.lockutils [req-0abaa041-f328-408f-a4c5-e7fd2200a237 req-c8c17e09-3fd5-4ccf-af26-a9bcf2ab8477 service nova] Acquiring lock "refresh_cache-27713bbd-1234-44ae-8520-78d85baaae12" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1239.628409] env[61978]: DEBUG oslo_concurrency.lockutils [req-0abaa041-f328-408f-a4c5-e7fd2200a237 req-c8c17e09-3fd5-4ccf-af26-a9bcf2ab8477 service nova] Acquired lock "refresh_cache-27713bbd-1234-44ae-8520-78d85baaae12" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.628592] env[61978]: DEBUG nova.network.neutron [req-0abaa041-f328-408f-a4c5-e7fd2200a237 req-c8c17e09-3fd5-4ccf-af26-a9bcf2ab8477 service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Refreshing network info cache for port 47785e4d-5976-42da-b954-01d1e5ec6d75 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1239.701518] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lock "refresh_cache-d3c82821-0617-4de6-8109-813a67910ed1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1239.721197] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395769, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.998839] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-678110f3-d015-43e6-b5da-54d3e774566b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.006939] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4345b02-0ce9-47dd-ae23-3b759016410a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.035886] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1bc55df-e208-40b8-acca-d9d5b1e7baac {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.043256] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ab96b6-06b2-4434-8810-43604dee17b2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.055707] env[61978]: DEBUG nova.compute.provider_tree [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1240.225017] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395769, 'name': ReconfigVM_Task, 'duration_secs': 0.988527} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.225017] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Reconfigured VM instance instance-0000005d to attach disk [datastore2] 4d357d46-8bbb-4228-a5a6-2ce67fe037d7/4d357d46-8bbb-4228-a5a6-2ce67fe037d7.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1240.225017] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f0330c4-7757-4f20-b327-b3ed1bffd768 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.229481] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1240.229481] env[61978]: value = "task-1395771" [ 1240.229481] env[61978]: _type = "Task" [ 1240.229481] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.238978] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395771, 'name': Rename_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.264180] env[61978]: DEBUG nova.objects.instance [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lazy-loading 'pci_requests' on Instance uuid 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1240.343075] env[61978]: DEBUG nova.network.neutron [req-0abaa041-f328-408f-a4c5-e7fd2200a237 req-c8c17e09-3fd5-4ccf-af26-a9bcf2ab8477 service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Updated VIF entry in instance network info cache for port 47785e4d-5976-42da-b954-01d1e5ec6d75. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1240.343468] env[61978]: DEBUG nova.network.neutron [req-0abaa041-f328-408f-a4c5-e7fd2200a237 req-c8c17e09-3fd5-4ccf-af26-a9bcf2ab8477 service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Updating instance_info_cache with network_info: [{"id": "47785e4d-5976-42da-b954-01d1e5ec6d75", "address": "fa:16:3e:06:c3:3b", "network": {"id": "b5590b99-32cd-425d-be74-a3ce197f3da6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1851734399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6968cd62406944afad1081b2558d4949", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ad8894f-e240-4013-8272-4e79daea0751", "external-id": "nsx-vlan-transportzone-204", "segmentation_id": 204, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47785e4d-59", "ovs_interfaceid": "47785e4d-5976-42da-b954-01d1e5ec6d75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1240.559500] env[61978]: DEBUG nova.scheduler.client.report [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1240.740801] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395771, 'name': Rename_Task, 'duration_secs': 0.379643} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.741253] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1240.741456] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-75a2bc7f-838d-4213-9cfa-48f07b3c821e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.747704] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1240.747704] env[61978]: value = "task-1395772" [ 1240.747704] env[61978]: _type = "Task" [ 1240.747704] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.758271] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395772, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.765911] env[61978]: DEBUG nova.objects.base [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Object Instance<758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9> lazy-loaded attributes: flavor,pci_requests {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1240.766163] env[61978]: DEBUG nova.network.neutron [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1240.834140] env[61978]: DEBUG nova.policy [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '346f982562de48fab1702aca567113ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3a4f29a959447159b2f7d194ea94873', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1240.846507] env[61978]: DEBUG oslo_concurrency.lockutils [req-0abaa041-f328-408f-a4c5-e7fd2200a237 req-c8c17e09-3fd5-4ccf-af26-a9bcf2ab8477 service nova] Releasing lock "refresh_cache-27713bbd-1234-44ae-8520-78d85baaae12" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1241.066366] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 
tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.274s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.066963] env[61978]: DEBUG nova.compute.manager [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1241.069730] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.967s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1241.070047] env[61978]: DEBUG nova.objects.instance [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lazy-loading 'resources' on Instance uuid 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1241.226315] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a9b47db-b8c8-4505-9a09-595fafdac24e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.250374] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updating instance 'd3c82821-0617-4de6-8109-813a67910ed1' progress to 0 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1241.264270] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395772, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.572509] env[61978]: DEBUG nova.compute.utils [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1241.573944] env[61978]: DEBUG nova.compute.manager [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1241.574127] env[61978]: DEBUG nova.network.neutron [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1241.647117] env[61978]: DEBUG nova.policy [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9580f2ba2f244d8c9950bbe509c7c9ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d4d29d9b6a74b4887684c7b310280b7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1241.763241] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1241.763648] env[61978]: DEBUG oslo_vmware.api [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395772, 'name': PowerOnVM_Task, 'duration_secs': 0.550896} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.766065] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bcb4890f-1c76-4831-8081-381a44ec7ad1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.767695] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1241.767916] env[61978]: INFO nova.compute.manager [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Took 9.40 seconds to spawn the instance on the hypervisor. 
[ 1241.768116] env[61978]: DEBUG nova.compute.manager [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1241.769065] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a9223b-1b4e-49e2-9e24-aa76f71532eb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.781334] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9d68c5-6029-4a80-b562-57feeb31a0c5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.783982] env[61978]: DEBUG oslo_vmware.api [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1241.783982] env[61978]: value = "task-1395773" [ 1241.783982] env[61978]: _type = "Task" [ 1241.783982] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.792120] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ed1735-0776-4e4e-b2ea-51d56bac7204 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.799317] env[61978]: DEBUG oslo_vmware.api [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395773, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.829844] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03529b3f-610e-4da6-a38e-c5430cb682cd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.838663] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f9a8c8-fa85-437f-aa1b-82a076a2fd11 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.852190] env[61978]: DEBUG nova.compute.provider_tree [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1242.077036] env[61978]: DEBUG nova.compute.manager [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1242.169358] env[61978]: DEBUG nova.network.neutron [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Successfully created port: 76c38b35-52ad-477c-9dfd-e1c64c8a7889 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1242.295308] env[61978]: INFO nova.compute.manager [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Took 34.06 seconds to build instance. [ 1242.300343] env[61978]: DEBUG oslo_vmware.api [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395773, 'name': PowerOffVM_Task, 'duration_secs': 0.41912} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.300717] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1242.300963] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updating instance 'd3c82821-0617-4de6-8109-813a67910ed1' progress to 17 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1242.355947] env[61978]: DEBUG nova.scheduler.client.report [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1242.366905] env[61978]: DEBUG nova.network.neutron [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Successfully updated port: 759bb051-5bd1-42fb-896d-ced73e9f67a1 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1242.797554] env[61978]: DEBUG oslo_concurrency.lockutils [None req-387220d4-2fc6-44a6-ae7a-754b3294829e tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "4d357d46-8bbb-4228-a5a6-2ce67fe037d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.568s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1242.807916] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:05Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1242.808218] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1242.808965] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1242.808965] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1242.808965] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1242.808965] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1242.809190] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1242.809264] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1242.809443] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Got 1 possible 
topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1242.809613] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1242.809793] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1242.814716] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80517809-d710-4221-b30c-a5ddf0c5f6c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.830721] env[61978]: DEBUG oslo_vmware.api [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1242.830721] env[61978]: value = "task-1395774" [ 1242.830721] env[61978]: _type = "Task" [ 1242.830721] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.839061] env[61978]: DEBUG oslo_vmware.api [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395774, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.864118] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.793s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1242.865525] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 12.278s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1242.865720] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1242.865916] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1242.866506] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.983s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1242.866506] env[61978]: DEBUG nova.objects.instance [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lazy-loading 'resources' on Instance uuid 90a38dba-0dae-455a-8d02-44c2bb098fb5 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1242.868168] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26fa07f-9e5f-4ca6-894e-7f9bfef32a56 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.872943] env[61978]: DEBUG oslo_concurrency.lockutils [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1242.872943] env[61978]: DEBUG oslo_concurrency.lockutils [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.872943] env[61978]: DEBUG nova.network.neutron [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 
tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1242.881290] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef82f7ff-dab5-4dbe-a1e1-c7243f989551 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.897707] env[61978]: INFO nova.scheduler.client.report [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Deleted allocations for instance 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7 [ 1242.899240] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8ea4c7-1c13-46a9-a7f0-118e355a2fb4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.908527] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0fb07d-e6f8-451f-82eb-5e4c4c057528 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.939255] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179590MB free_disk=185GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1242.939424] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1243.092758] env[61978]: DEBUG nova.compute.manager [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1243.120871] env[61978]: DEBUG nova.virt.hardware [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1243.121156] env[61978]: DEBUG nova.virt.hardware [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1243.121324] env[61978]: DEBUG nova.virt.hardware [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1243.121512] env[61978]: DEBUG nova.virt.hardware [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1243.121666] env[61978]: DEBUG nova.virt.hardware [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1243.121817] env[61978]: DEBUG nova.virt.hardware [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1243.122043] env[61978]: DEBUG nova.virt.hardware [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1243.122216] env[61978]: DEBUG nova.virt.hardware [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1243.122389] env[61978]: DEBUG nova.virt.hardware [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1243.122557] env[61978]: DEBUG nova.virt.hardware [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1243.122737] env[61978]: DEBUG nova.virt.hardware [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1243.123627] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a591fe4a-f570-451f-88da-284120777b6e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.132059] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab461128-f80f-42d6-ac6d-7856e3387614 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.340540] env[61978]: DEBUG oslo_vmware.api [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395774, 'name': ReconfigVM_Task, 'duration_secs': 0.165148} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.340878] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updating instance 'd3c82821-0617-4de6-8109-813a67910ed1' progress to 33 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1243.408272] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3b24350f-e8e6-4835-a1f0-531a6f6d67a0 tempest-ServerRescueNegativeTestJSON-2041106263 tempest-ServerRescueNegativeTestJSON-2041106263-project-member] Lock "8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.267s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1243.429820] env[61978]: WARNING nova.network.neutron [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] 3b4d30cc-d9a1-4180-b6eb-881241a1c0f4 already exists in list: networks containing: ['3b4d30cc-d9a1-4180-b6eb-881241a1c0f4']. 
ignoring it [ 1243.430032] env[61978]: WARNING nova.network.neutron [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] 3b4d30cc-d9a1-4180-b6eb-881241a1c0f4 already exists in list: networks containing: ['3b4d30cc-d9a1-4180-b6eb-881241a1c0f4']. ignoring it [ 1243.487339] env[61978]: DEBUG nova.compute.manager [req-971d6fcc-dd3a-4d26-a1ec-eaaa0d3b6223 req-54616a39-a9de-4f8a-a3f6-65c4ade05c0c service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Received event network-changed-47785e4d-5976-42da-b954-01d1e5ec6d75 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1243.487429] env[61978]: DEBUG nova.compute.manager [req-971d6fcc-dd3a-4d26-a1ec-eaaa0d3b6223 req-54616a39-a9de-4f8a-a3f6-65c4ade05c0c service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Refreshing instance network info cache due to event network-changed-47785e4d-5976-42da-b954-01d1e5ec6d75. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1243.487577] env[61978]: DEBUG oslo_concurrency.lockutils [req-971d6fcc-dd3a-4d26-a1ec-eaaa0d3b6223 req-54616a39-a9de-4f8a-a3f6-65c4ade05c0c service nova] Acquiring lock "refresh_cache-27713bbd-1234-44ae-8520-78d85baaae12" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1243.487774] env[61978]: DEBUG oslo_concurrency.lockutils [req-971d6fcc-dd3a-4d26-a1ec-eaaa0d3b6223 req-54616a39-a9de-4f8a-a3f6-65c4ade05c0c service nova] Acquired lock "refresh_cache-27713bbd-1234-44ae-8520-78d85baaae12" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.487952] env[61978]: DEBUG nova.network.neutron [req-971d6fcc-dd3a-4d26-a1ec-eaaa0d3b6223 req-54616a39-a9de-4f8a-a3f6-65c4ade05c0c service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Refreshing network info cache for port 47785e4d-5976-42da-b954-01d1e5ec6d75 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1243.516896] env[61978]: DEBUG nova.compute.manager [req-5844be18-53da-4a82-8a35-fcfeba351c12 req-0a59f753-da70-4978-857d-7d9553168c36 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Received event network-vif-plugged-759bb051-5bd1-42fb-896d-ced73e9f67a1 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1243.517343] env[61978]: DEBUG oslo_concurrency.lockutils [req-5844be18-53da-4a82-8a35-fcfeba351c12 req-0a59f753-da70-4978-857d-7d9553168c36 service nova] Acquiring lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1243.517768] env[61978]: DEBUG oslo_concurrency.lockutils [req-5844be18-53da-4a82-8a35-fcfeba351c12 req-0a59f753-da70-4978-857d-7d9553168c36 service nova] Lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1243.518483] env[61978]: DEBUG oslo_concurrency.lockutils [req-5844be18-53da-4a82-8a35-fcfeba351c12 req-0a59f753-da70-4978-857d-7d9553168c36 service nova] Lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1243.518860] env[61978]: DEBUG nova.compute.manager [req-5844be18-53da-4a82-8a35-fcfeba351c12 req-0a59f753-da70-4978-857d-7d9553168c36 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] No waiting events found dispatching network-vif-plugged-759bb051-5bd1-42fb-896d-ced73e9f67a1 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1243.519199] env[61978]: WARNING nova.compute.manager [req-5844be18-53da-4a82-8a35-fcfeba351c12 req-0a59f753-da70-4978-857d-7d9553168c36 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Received unexpected event network-vif-plugged-759bb051-5bd1-42fb-896d-ced73e9f67a1 for instance with vm_state active and task_state None. [ 1243.600910] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d624c10e-1752-4c74-a899-61742bea3a5d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.611460] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd1fce0f-9cda-46a9-bc02-6c5aede7af32 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.643262] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-582a8244-99c6-41fb-9bc1-bf85805d9f27 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.655560] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c833fc3-e349-4c62-9d25-52fbd65f3a8d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.674437] env[61978]: DEBUG nova.compute.provider_tree [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1243.847798] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1243.848141] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Flavor limits 0:0:0 {{(pid=61978) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1243.848247] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1243.848476] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1243.848903] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1243.849192] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1243.849455] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1243.849745] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1243.850091] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1243.850407] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1243.850670] env[61978]: DEBUG nova.virt.hardware [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1243.857860] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Reconfiguring VM instance instance-00000050 to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1243.860500] 
env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-658f34e8-d275-4807-8e02-f0a5c680cd4a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.880549] env[61978]: DEBUG oslo_vmware.api [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1243.880549] env[61978]: value = "task-1395775" [ 1243.880549] env[61978]: _type = "Task" [ 1243.880549] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.888295] env[61978]: DEBUG oslo_vmware.api [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395775, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.986072] env[61978]: DEBUG nova.network.neutron [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Updating instance_info_cache with network_info: [{"id": "3c5e24a1-8ef7-45a5-a39a-4ce790adc338", "address": "fa:16:3e:14:c6:36", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c5e24a1-8e", "ovs_interfaceid": "3c5e24a1-8ef7-45a5-a39a-4ce790adc338", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "45cdb631-5f61-4991-973e-3ba5b5ff0820", "address": "fa:16:3e:ef:97:76", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap45cdb631-5f", "ovs_interfaceid": "45cdb631-5f61-4991-973e-3ba5b5ff0820", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "759bb051-5bd1-42fb-896d-ced73e9f67a1", "address": "fa:16:3e:b1:58:6f", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap759bb051-5b", "ovs_interfaceid": "759bb051-5bd1-42fb-896d-ced73e9f67a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1244.181346] env[61978]: DEBUG nova.scheduler.client.report [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1244.234205] env[61978]: DEBUG nova.network.neutron [req-971d6fcc-dd3a-4d26-a1ec-eaaa0d3b6223 req-54616a39-a9de-4f8a-a3f6-65c4ade05c0c service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Updated VIF entry in instance network info cache for port 47785e4d-5976-42da-b954-01d1e5ec6d75. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1244.234678] env[61978]: DEBUG nova.network.neutron [req-971d6fcc-dd3a-4d26-a1ec-eaaa0d3b6223 req-54616a39-a9de-4f8a-a3f6-65c4ade05c0c service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Updating instance_info_cache with network_info: [{"id": "47785e4d-5976-42da-b954-01d1e5ec6d75", "address": "fa:16:3e:06:c3:3b", "network": {"id": "b5590b99-32cd-425d-be74-a3ce197f3da6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1851734399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6968cd62406944afad1081b2558d4949", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ad8894f-e240-4013-8272-4e79daea0751", "external-id": "nsx-vlan-transportzone-204", "segmentation_id": 204, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47785e4d-59", "ovs_interfaceid": "47785e4d-5976-42da-b954-01d1e5ec6d75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1244.390870] env[61978]: DEBUG oslo_vmware.api [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395775, 'name': ReconfigVM_Task, 'duration_secs': 0.20277} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.391335] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Reconfigured VM instance instance-00000050 to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1244.392467] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-729d3ef2-176c-403d-9be7-3f74177532fc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.418609] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] d3c82821-0617-4de6-8109-813a67910ed1/d3c82821-0617-4de6-8109-813a67910ed1.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1244.418957] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c26d5f80-a91f-4996-b9d8-4ca52465af16 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.437808] env[61978]: DEBUG oslo_vmware.api [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1244.437808] env[61978]: value = "task-1395776" [ 1244.437808] env[61978]: _type = "Task" [ 1244.437808] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.446389] env[61978]: DEBUG oslo_vmware.api [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395776, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.489801] env[61978]: DEBUG oslo_concurrency.lockutils [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1244.490520] env[61978]: DEBUG oslo_concurrency.lockutils [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1244.490693] env[61978]: DEBUG oslo_concurrency.lockutils [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.491565] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe62c42-7072-4e9a-84a5-0269a8053b0a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.509627] env[61978]: DEBUG nova.virt.hardware [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1244.509930] env[61978]: DEBUG nova.virt.hardware [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1244.510040] env[61978]: DEBUG nova.virt.hardware [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1244.510233] env[61978]: DEBUG nova.virt.hardware [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1244.510386] env[61978]: DEBUG nova.virt.hardware [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1244.510537] env[61978]: DEBUG nova.virt.hardware [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1244.510766] env[61978]: DEBUG nova.virt.hardware [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1244.510918] env[61978]: DEBUG nova.virt.hardware [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1244.511110] env[61978]: DEBUG nova.virt.hardware [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1244.511286] env[61978]: DEBUG nova.virt.hardware [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1244.511464] env[61978]: DEBUG nova.virt.hardware [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1244.517750] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Reconfiguring VM to attach interface {{(pid=61978) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1244.518142] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-645d16df-66a8-4019-84ae-1a76d0769c73 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.535372] env[61978]: DEBUG oslo_vmware.api [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1244.535372] env[61978]: value = "task-1395777" [ 1244.535372] env[61978]: _type = "Task" [ 1244.535372] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.543412] env[61978]: DEBUG oslo_vmware.api [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395777, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.598365] env[61978]: DEBUG nova.network.neutron [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Successfully updated port: 76c38b35-52ad-477c-9dfd-e1c64c8a7889 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1244.686326] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.820s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1244.689059] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.770s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1244.691461] env[61978]: INFO nova.compute.claims [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1244.723536] env[61978]: INFO nova.scheduler.client.report [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Deleted allocations for instance 90a38dba-0dae-455a-8d02-44c2bb098fb5 [ 1244.738017] env[61978]: DEBUG oslo_concurrency.lockutils [req-971d6fcc-dd3a-4d26-a1ec-eaaa0d3b6223 req-54616a39-a9de-4f8a-a3f6-65c4ade05c0c service nova] Releasing lock "refresh_cache-27713bbd-1234-44ae-8520-78d85baaae12" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1244.738017] env[61978]: DEBUG nova.compute.manager [req-971d6fcc-dd3a-4d26-a1ec-eaaa0d3b6223 req-54616a39-a9de-4f8a-a3f6-65c4ade05c0c service nova] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Received event network-changed-cb02d7c2-d091-4929-a5bd-80c484b81de0 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1244.738217] env[61978]: DEBUG nova.compute.manager [req-971d6fcc-dd3a-4d26-a1ec-eaaa0d3b6223 req-54616a39-a9de-4f8a-a3f6-65c4ade05c0c service nova] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Refreshing instance network info cache due to event network-changed-cb02d7c2-d091-4929-a5bd-80c484b81de0. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1244.738381] env[61978]: DEBUG oslo_concurrency.lockutils [req-971d6fcc-dd3a-4d26-a1ec-eaaa0d3b6223 req-54616a39-a9de-4f8a-a3f6-65c4ade05c0c service nova] Acquiring lock "refresh_cache-f33d00ec-72b7-43f2-bc0d-320e3219ae47" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1244.738505] env[61978]: DEBUG oslo_concurrency.lockutils [req-971d6fcc-dd3a-4d26-a1ec-eaaa0d3b6223 req-54616a39-a9de-4f8a-a3f6-65c4ade05c0c service nova] Acquired lock "refresh_cache-f33d00ec-72b7-43f2-bc0d-320e3219ae47" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.738676] env[61978]: DEBUG nova.network.neutron [req-971d6fcc-dd3a-4d26-a1ec-eaaa0d3b6223 req-54616a39-a9de-4f8a-a3f6-65c4ade05c0c service nova] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Refreshing network info cache for port cb02d7c2-d091-4929-a5bd-80c484b81de0 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1244.947816] env[61978]: DEBUG oslo_vmware.api [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395776, 'name': ReconfigVM_Task, 'duration_secs': 0.37306} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.948195] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Reconfigured VM instance instance-00000050 to attach disk [datastore2] d3c82821-0617-4de6-8109-813a67910ed1/d3c82821-0617-4de6-8109-813a67910ed1.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1244.948417] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updating instance 'd3c82821-0617-4de6-8109-813a67910ed1' progress to 50 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1245.046832] env[61978]: DEBUG oslo_vmware.api [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395777, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.100872] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "refresh_cache-03b08977-4b20-4bac-b48b-06ba5df4e579" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1245.100872] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquired lock "refresh_cache-03b08977-4b20-4bac-b48b-06ba5df4e579" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1245.100872] env[61978]: DEBUG nova.network.neutron [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1245.237103] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b280d893-d82e-4279-9b5c-25a26f660251 tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "90a38dba-0dae-455a-8d02-44c2bb098fb5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.426s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1245.456025] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab0dfaa-51f7-459f-9413-4743ddf39adb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.483667] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b23711-d505-40b1-98c7-75f40ac8c081 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.505760] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updating instance 'd3c82821-0617-4de6-8109-813a67910ed1' progress to 67 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1245.546029] env[61978]: DEBUG oslo_vmware.api [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395777, 'name': ReconfigVM_Task, 'duration_secs': 0.90864} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.546682] env[61978]: DEBUG oslo_concurrency.lockutils [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1245.546985] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Reconfigured VM to attach interface {{(pid=61978) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1245.600499] env[61978]: DEBUG nova.network.neutron [req-971d6fcc-dd3a-4d26-a1ec-eaaa0d3b6223 req-54616a39-a9de-4f8a-a3f6-65c4ade05c0c service nova] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Updated VIF entry in instance network info cache for port cb02d7c2-d091-4929-a5bd-80c484b81de0. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1245.600950] env[61978]: DEBUG nova.network.neutron [req-971d6fcc-dd3a-4d26-a1ec-eaaa0d3b6223 req-54616a39-a9de-4f8a-a3f6-65c4ade05c0c service nova] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Updating instance_info_cache with network_info: [{"id": "cb02d7c2-d091-4929-a5bd-80c484b81de0", "address": "fa:16:3e:3e:15:16", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb02d7c2-d0", "ovs_interfaceid": "cb02d7c2-d091-4929-a5bd-80c484b81de0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1245.678129] env[61978]: DEBUG nova.network.neutron [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1245.930942] env[61978]: DEBUG nova.network.neutron [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Updating instance_info_cache with network_info: [{"id": "76c38b35-52ad-477c-9dfd-e1c64c8a7889", "address": "fa:16:3e:9f:0a:d0", "network": {"id": "61175c4b-0b5d-4186-97ac-7a615b95ef28", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-341600036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d4d29d9b6a74b4887684c7b310280b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76c38b35-52", "ovs_interfaceid": "76c38b35-52ad-477c-9dfd-e1c64c8a7889", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1245.942330] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e464c6d-ef81-4670-992c-af729fc0b975 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.953956] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b7e9f5-45a1-4492-a25f-7b85a8616c45 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.987943] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa5a24a-4cc6-4e1c-a775-e0779511f160 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.997207] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b083f9-f3bf-4697-b352-9875cb7338e4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.016417] env[61978]: DEBUG nova.compute.provider_tree [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1246.052394] env[61978]: DEBUG oslo_concurrency.lockutils [None req-185c6880-427f-469b-ae01-03e5f8ae10c8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "interface-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-759bb051-5bd1-42fb-896d-ced73e9f67a1" "released" by 
"nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.428s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1246.106549] env[61978]: DEBUG oslo_concurrency.lockutils [req-971d6fcc-dd3a-4d26-a1ec-eaaa0d3b6223 req-54616a39-a9de-4f8a-a3f6-65c4ade05c0c service nova] Releasing lock "refresh_cache-f33d00ec-72b7-43f2-bc0d-320e3219ae47" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1246.119848] env[61978]: DEBUG nova.compute.manager [req-1974b483-399a-4f89-bf6d-270528e1f5e8 req-c568840a-b2c7-4237-b577-fa48d6ffa3cd service nova] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Received event network-vif-plugged-76c38b35-52ad-477c-9dfd-e1c64c8a7889 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1246.120039] env[61978]: DEBUG oslo_concurrency.lockutils [req-1974b483-399a-4f89-bf6d-270528e1f5e8 req-c568840a-b2c7-4237-b577-fa48d6ffa3cd service nova] Acquiring lock "03b08977-4b20-4bac-b48b-06ba5df4e579-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1246.120288] env[61978]: DEBUG oslo_concurrency.lockutils [req-1974b483-399a-4f89-bf6d-270528e1f5e8 req-c568840a-b2c7-4237-b577-fa48d6ffa3cd service nova] Lock "03b08977-4b20-4bac-b48b-06ba5df4e579-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1246.120524] env[61978]: DEBUG oslo_concurrency.lockutils [req-1974b483-399a-4f89-bf6d-270528e1f5e8 req-c568840a-b2c7-4237-b577-fa48d6ffa3cd service nova] Lock "03b08977-4b20-4bac-b48b-06ba5df4e579-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1246.120624] env[61978]: DEBUG nova.compute.manager [req-1974b483-399a-4f89-bf6d-270528e1f5e8 req-c568840a-b2c7-4237-b577-fa48d6ffa3cd service nova] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] No waiting events found dispatching network-vif-plugged-76c38b35-52ad-477c-9dfd-e1c64c8a7889 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1246.120775] env[61978]: WARNING nova.compute.manager [req-1974b483-399a-4f89-bf6d-270528e1f5e8 req-c568840a-b2c7-4237-b577-fa48d6ffa3cd service nova] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Received unexpected event network-vif-plugged-76c38b35-52ad-477c-9dfd-e1c64c8a7889 for instance with vm_state building and task_state spawning. 
[ 1246.433488] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Releasing lock "refresh_cache-03b08977-4b20-4bac-b48b-06ba5df4e579" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1246.433851] env[61978]: DEBUG nova.compute.manager [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Instance network_info: |[{"id": "76c38b35-52ad-477c-9dfd-e1c64c8a7889", "address": "fa:16:3e:9f:0a:d0", "network": {"id": "61175c4b-0b5d-4186-97ac-7a615b95ef28", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-341600036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d4d29d9b6a74b4887684c7b310280b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76c38b35-52", "ovs_interfaceid": "76c38b35-52ad-477c-9dfd-e1c64c8a7889", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1246.434347] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:0a:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd5970ab5-34b8-4065-bfa6-f568b8f103b7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '76c38b35-52ad-477c-9dfd-e1c64c8a7889', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1246.441888] env[61978]: DEBUG oslo.service.loopingcall [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1246.442150] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1246.442435] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f785c639-b681-466b-b866-a5b7d60bad5a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.464059] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1246.464059] env[61978]: value = "task-1395779" [ 1246.464059] env[61978]: _type = "Task" [ 1246.464059] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.472611] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395779, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.519642] env[61978]: DEBUG nova.scheduler.client.report [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1246.707051] env[61978]: DEBUG nova.compute.manager [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Stashing vm_state: active {{(pid=61978) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1246.761293] env[61978]: DEBUG nova.compute.manager [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Received event network-changed-47785e4d-5976-42da-b954-01d1e5ec6d75 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1246.762920] env[61978]: DEBUG nova.compute.manager [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Refreshing instance network info cache due to event network-changed-47785e4d-5976-42da-b954-01d1e5ec6d75. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1246.763269] env[61978]: DEBUG oslo_concurrency.lockutils [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] Acquiring lock "refresh_cache-27713bbd-1234-44ae-8520-78d85baaae12" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1246.763691] env[61978]: DEBUG oslo_concurrency.lockutils [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] Acquired lock "refresh_cache-27713bbd-1234-44ae-8520-78d85baaae12" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.763974] env[61978]: DEBUG nova.network.neutron [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Refreshing network info cache for port 47785e4d-5976-42da-b954-01d1e5ec6d75 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1246.779953] env[61978]: DEBUG nova.compute.manager [req-aa1253bd-4937-499e-befb-b937ce5f647f req-a449ebc1-93bc-45cd-bc6d-5bf200e204c4 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Received event network-changed-759bb051-5bd1-42fb-896d-ced73e9f67a1 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1246.779953] env[61978]: DEBUG nova.compute.manager [req-aa1253bd-4937-499e-befb-b937ce5f647f req-a449ebc1-93bc-45cd-bc6d-5bf200e204c4 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Refreshing instance network info cache due to event network-changed-759bb051-5bd1-42fb-896d-ced73e9f67a1. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1246.779953] env[61978]: DEBUG oslo_concurrency.lockutils [req-aa1253bd-4937-499e-befb-b937ce5f647f req-a449ebc1-93bc-45cd-bc6d-5bf200e204c4 service nova] Acquiring lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1246.780324] env[61978]: DEBUG oslo_concurrency.lockutils [req-aa1253bd-4937-499e-befb-b937ce5f647f req-a449ebc1-93bc-45cd-bc6d-5bf200e204c4 service nova] Acquired lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.780324] env[61978]: DEBUG nova.network.neutron [req-aa1253bd-4937-499e-befb-b937ce5f647f req-a449ebc1-93bc-45cd-bc6d-5bf200e204c4 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Refreshing network info cache for port 759bb051-5bd1-42fb-896d-ced73e9f67a1 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1246.973459] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395779, 'name': CreateVM_Task, 'duration_secs': 0.507604} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.973828] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1246.974321] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1246.974501] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.974826] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1246.975114] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e51dcb83-9205-47e9-aaff-d147bd71b0ec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.980026] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1246.980026] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52676f0e-8c68-729c-a636-d7a5a8f17f3e" [ 1246.980026] env[61978]: _type = "Task" [ 1246.980026] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.987573] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52676f0e-8c68-729c-a636-d7a5a8f17f3e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.025604] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.337s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1247.026397] env[61978]: DEBUG nova.compute.manager [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1247.030818] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 4.090s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.195114] env[61978]: DEBUG nova.network.neutron [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Port d2d39b09-4acd-4f24-aa07-31e86f78f134 binding to destination host cpu-1 is already ACTIVE {{(pid=61978) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1247.229767] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1247.502595] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52676f0e-8c68-729c-a636-d7a5a8f17f3e, 'name': SearchDatastore_Task, 'duration_secs': 0.009141} completed successfully. 
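The lockutils entries above record how long each caller waited for the "compute_resources" lock and how long it was then held (waited 4.090s, held 2.337s). The sketch below approximates that bookkeeping with a plain threading.Lock; the real code uses oslo_concurrency.lockutils, so this timing wrapper is only an illustration of the waited/held reporting, not the library's implementation.

import threading
import time
from contextlib import contextmanager

_lock = threading.Lock()

@contextmanager
def timed_lock(name="compute_resources"):
    # Report wait and hold times in the spirit of the
    # "waited N.NNNs" / "held N.NNNs" lockutils messages above.
    t0 = time.monotonic()
    _lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
    try:
        yield
    finally:
        held = time.monotonic() - t0 - waited
        _lock.release()
        print(f'Lock "{name}" released :: held {held:.3f}s')

if __name__ == "__main__":
    with timed_lock():
        time.sleep(0.05)   # stand-in for the resource tracker's claim work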
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.503342] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1247.503592] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1247.503835] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1247.503989] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1247.504207] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1247.504484] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ee6e07c-341d-4e34-b78e-32d25ada8248 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.519127] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1247.519293] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1247.520073] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5eb68ba4-239d-4dbc-aecb-1dbb576d90cf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.526305] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1247.526305] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c77d9f-6826-6a9c-baec-e3aa9d64b031" [ 1247.526305] env[61978]: _type = "Task" [ 1247.526305] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.536310] env[61978]: DEBUG nova.compute.utils [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1247.543208] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c77d9f-6826-6a9c-baec-e3aa9d64b031, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.545465] env[61978]: DEBUG nova.network.neutron [req-aa1253bd-4937-499e-befb-b937ce5f647f req-a449ebc1-93bc-45cd-bc6d-5bf200e204c4 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Updated VIF entry in instance network info cache for port 759bb051-5bd1-42fb-896d-ced73e9f67a1. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1247.545898] env[61978]: DEBUG nova.network.neutron [req-aa1253bd-4937-499e-befb-b937ce5f647f req-a449ebc1-93bc-45cd-bc6d-5bf200e204c4 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Updating instance_info_cache with network_info: [{"id": "3c5e24a1-8ef7-45a5-a39a-4ce790adc338", "address": "fa:16:3e:14:c6:36", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c5e24a1-8e", "ovs_interfaceid": "3c5e24a1-8ef7-45a5-a39a-4ce790adc338", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "45cdb631-5f61-4991-973e-3ba5b5ff0820", "address": "fa:16:3e:ef:97:76", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45cdb631-5f", "ovs_interfaceid": "45cdb631-5f61-4991-973e-3ba5b5ff0820", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "759bb051-5bd1-42fb-896d-ced73e9f67a1", "address": "fa:16:3e:b1:58:6f", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap759bb051-5b", "ovs_interfaceid": "759bb051-5bd1-42fb-896d-ced73e9f67a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.547387] env[61978]: DEBUG nova.compute.manager [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1247.547645] env[61978]: DEBUG nova.network.neutron [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1247.596126] env[61978]: DEBUG nova.policy [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '863cb767fc5246cc8d7eb7e74403c7c1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f93bebf4ffee4172ab32b91becad7a05', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1247.640939] env[61978]: DEBUG nova.network.neutron [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Updated VIF entry in instance network info cache for port 47785e4d-5976-42da-b954-01d1e5ec6d75. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1247.641449] env[61978]: DEBUG nova.network.neutron [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Updating instance_info_cache with network_info: [{"id": "47785e4d-5976-42da-b954-01d1e5ec6d75", "address": "fa:16:3e:06:c3:3b", "network": {"id": "b5590b99-32cd-425d-be74-a3ce197f3da6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1851734399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6968cd62406944afad1081b2558d4949", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ad8894f-e240-4013-8272-4e79daea0751", "external-id": "nsx-vlan-transportzone-204", "segmentation_id": 204, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47785e4d-59", "ovs_interfaceid": "47785e4d-5976-42da-b954-01d1e5ec6d75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.814180] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "17c56c1c-9992-4559-ad23-c68909ae6792" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1247.814180] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "17c56c1c-9992-4559-ad23-c68909ae6792" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.814180] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "17c56c1c-9992-4559-ad23-c68909ae6792-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1247.814376] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "17c56c1c-9992-4559-ad23-c68909ae6792-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.814616] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a669ee2-4080-4919-b989-505c8571084d 
tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "17c56c1c-9992-4559-ad23-c68909ae6792-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1247.818194] env[61978]: INFO nova.compute.manager [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Terminating instance [ 1247.821449] env[61978]: DEBUG nova.compute.manager [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1247.821661] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1247.822584] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91aa8a75-0a12-420a-b540-2239c7a322ad {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.830951] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1247.830951] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d57de9ba-e842-4449-a091-a54a8530fa23 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.837045] env[61978]: DEBUG oslo_vmware.api [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1247.837045] env[61978]: value = "task-1395780" [ 1247.837045] env[61978]: _type = "Task" [ 1247.837045] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.845689] env[61978]: DEBUG oslo_vmware.api [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395780, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.877417] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Acquiring lock "27713bbd-1234-44ae-8520-78d85baaae12" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1247.877963] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Lock "27713bbd-1234-44ae-8520-78d85baaae12" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.878240] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Acquiring lock "27713bbd-1234-44ae-8520-78d85baaae12-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1247.878469] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Lock "27713bbd-1234-44ae-8520-78d85baaae12-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.878654] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Lock "27713bbd-1234-44ae-8520-78d85baaae12-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1247.881687] env[61978]: INFO nova.compute.manager [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Terminating instance [ 1247.887530] env[61978]: DEBUG nova.compute.manager [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1247.887801] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1247.888425] env[61978]: DEBUG oslo_concurrency.lockutils [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Acquiring lock "de8abe58-e0c2-4eaf-b3a6-7106e0861080" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1247.888722] env[61978]: DEBUG oslo_concurrency.lockutils [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Lock "de8abe58-e0c2-4eaf-b3a6-7106e0861080" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.890428] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e4ceef-1c7b-453e-bdec-1df9c6013ac4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.900377] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1247.900377] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a065dde-418a-48b7-9a7e-98e97a700185 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.904662] env[61978]: DEBUG nova.network.neutron [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Successfully created port: 41133564-b2c5-468a-aafc-5e11f8388a94 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1247.908257] env[61978]: DEBUG oslo_vmware.api [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Waiting for the task: (returnval){ [ 1247.908257] env[61978]: value = "task-1395781" [ 1247.908257] env[61978]: _type = "Task" [ 1247.908257] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.921024] env[61978]: DEBUG oslo_vmware.api [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395781, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.040155] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c77d9f-6826-6a9c-baec-e3aa9d64b031, 'name': SearchDatastore_Task, 'duration_secs': 0.008729} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.041354] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7084762-a339-49c7-9f4f-2c81b11255ed {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.046868] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1248.046868] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5209cd2f-ecde-2cd9-7762-f9a491aa9ca6" [ 1248.046868] env[61978]: _type = "Task" [ 1248.046868] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.050243] env[61978]: DEBUG nova.compute.manager [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1248.053691] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Applying migration context for instance d3c82821-0617-4de6-8109-813a67910ed1 as it has an incoming, in-progress migration 984cd406-40bb-41ab-8948-b39be4277799. Migration status is post-migrating {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1248.055179] env[61978]: INFO nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Updating resource usage from migration e6b37eeb-7505-4485-ba8d-f4b9a4c26958 [ 1248.055910] env[61978]: INFO nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updating resource usage from migration 984cd406-40bb-41ab-8948-b39be4277799 [ 1248.057755] env[61978]: DEBUG oslo_concurrency.lockutils [req-aa1253bd-4937-499e-befb-b937ce5f647f req-a449ebc1-93bc-45cd-bc6d-5bf200e204c4 service nova] Releasing lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1248.064015] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5209cd2f-ecde-2cd9-7762-f9a491aa9ca6, 'name': SearchDatastore_Task, 'duration_secs': 0.009251} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.064373] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1248.067284] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 03b08977-4b20-4bac-b48b-06ba5df4e579/03b08977-4b20-4bac-b48b-06ba5df4e579.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1248.067284] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-70df7bb7-cca6-4744-8904-215c074c1ddd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.076094] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1248.076094] env[61978]: value = "task-1395783" [ 1248.076094] env[61978]: _type = "Task" [ 1248.076094] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.088308] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395783, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.089626] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 243e7146-46fc-43f4-a83b-cdc58f397f9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1248.089761] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1248.089908] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 17c56c1c-9992-4559-ad23-c68909ae6792 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
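The CopyVirtualDisk_Task above copies the cached image VMDK into a per-instance folder, so both paths are derived mechanically from the image id and the instance uuid. The few lines below reproduce that path construction as plain string handling; the "[datastore2] ..." notation matches the log, while the helper name is made up for the sketch.

IMAGE_ID = "4732143d-796a-4a66-9f1e-806f8b0654e0"
INSTANCE_UUID = "03b08977-4b20-4bac-b48b-06ba5df4e579"
DATASTORE = "datastore2"
CACHE_DIR = "devstack-image-cache_base"

def vmdk_path(datastore, folder, name):
    # "[datastore2] folder/name.vmdk" notation, as it appears in the log.
    return f"[{datastore}] {folder}/{name}.vmdk"

source = vmdk_path(DATASTORE, f"{CACHE_DIR}/{IMAGE_ID}", IMAGE_ID)
dest = vmdk_path(DATASTORE, INSTANCE_UUID, INSTANCE_UUID)

if __name__ == "__main__":
    print(f"Copying Virtual Disk {source} to {dest}")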
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1248.090153] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance b76dd94e-c14b-48d4-bb7f-020313412ca2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1248.090153] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance e9b70b36-d0d8-430e-a5e7-588d3c75d7ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1248.090364] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 27713bbd-1234-44ae-8520-78d85baaae12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1248.090364] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance f33d00ec-72b7-43f2-bc0d-320e3219ae47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1248.090495] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 4d357d46-8bbb-4228-a5a6-2ce67fe037d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1248.090708] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Migration 984cd406-40bb-41ab-8948-b39be4277799 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1248.090804] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance d3c82821-0617-4de6-8109-813a67910ed1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1248.090867] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 03b08977-4b20-4bac-b48b-06ba5df4e579 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1248.091032] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance f9b57cf4-f2e4-4d2a-9bd4-74952d46876d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1248.091132] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Migration e6b37eeb-7505-4485-ba8d-f4b9a4c26958 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1248.091318] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1248.144274] env[61978]: DEBUG oslo_concurrency.lockutils [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] Releasing lock "refresh_cache-27713bbd-1234-44ae-8520-78d85baaae12" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1248.144738] env[61978]: DEBUG nova.compute.manager [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Received event network-changed-76c38b35-52ad-477c-9dfd-e1c64c8a7889 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1248.145030] env[61978]: DEBUG nova.compute.manager [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Refreshing instance network info cache due to event network-changed-76c38b35-52ad-477c-9dfd-e1c64c8a7889. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1248.145740] env[61978]: DEBUG oslo_concurrency.lockutils [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] Acquiring lock "refresh_cache-03b08977-4b20-4bac-b48b-06ba5df4e579" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1248.145740] env[61978]: DEBUG oslo_concurrency.lockutils [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] Acquired lock "refresh_cache-03b08977-4b20-4bac-b48b-06ba5df4e579" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.145740] env[61978]: DEBUG nova.network.neutron [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Refreshing network info cache for port 76c38b35-52ad-477c-9dfd-e1c64c8a7889 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1248.224677] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "d3c82821-0617-4de6-8109-813a67910ed1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1248.225026] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "d3c82821-0617-4de6-8109-813a67910ed1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1248.225281] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "d3c82821-0617-4de6-8109-813a67910ed1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.347153] env[61978]: DEBUG oslo_vmware.api [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395780, 'name': PowerOffVM_Task, 'duration_secs': 0.359701} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.347575] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1248.347854] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1248.348195] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf77f9d5-17b3-4743-bae5-006c1a3495ab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.395134] env[61978]: DEBUG nova.compute.manager [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1248.426376] env[61978]: DEBUG oslo_vmware.api [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395781, 'name': PowerOffVM_Task, 'duration_secs': 0.23183} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.426882] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1248.431053] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1248.431053] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-07fe13f9-53c3-418d-92ff-e48500abb85d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.495347] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1248.495696] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Deleting contents of the VM from datastore datastore2 {{(pid=61978) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1248.495903] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Deleting the datastore file [datastore2] 17c56c1c-9992-4559-ad23-c68909ae6792 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1248.496959] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-377acd51-7602-4cc4-b73a-f55f6d9e5225 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.504169] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1248.504444] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1248.508031] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Deleting the datastore file [datastore2] 27713bbd-1234-44ae-8520-78d85baaae12 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1248.508031] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ad8007f-72b2-462b-8e4d-f63f7c63135b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.509825] env[61978]: DEBUG oslo_vmware.api [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for the task: (returnval){ [ 1248.509825] env[61978]: value = "task-1395787" [ 1248.509825] env[61978]: _type = "Task" [ 1248.509825] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.511569] env[61978]: DEBUG oslo_vmware.api [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Waiting for the task: (returnval){ [ 1248.511569] env[61978]: value = "task-1395788" [ 1248.511569] env[61978]: _type = "Task" [ 1248.511569] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.528109] env[61978]: DEBUG oslo_vmware.api [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395788, 'name': DeleteDatastoreFile_Task} progress is 0%. 
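The entries for both 17c56c1c-9992-4559-ad23-c68909ae6792 and 27713bbd-1234-44ae-8520-78d85baaae12 above follow the same teardown order: power off the VM, unregister it, then delete its folder from the datastore, each step driven by its own vCenter task. The compact sketch below outlines that sequence with hypothetical stubs; the real driver issues PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task through oslo.vmware rather than these print statements.

def power_off(vm_ref):
    # Stand-in for VirtualMachine.PowerOffVM_Task plus waiting on the task.
    print(f"Powered off the VM {vm_ref}")

def unregister(vm_ref):
    # Stand-in for VirtualMachine.UnregisterVM.
    print(f"Unregistered the VM {vm_ref}")

def delete_datastore_dir(datastore, instance_uuid):
    # Stand-in for FileManager.DeleteDatastoreFile_Task plus waiting on the task.
    print(f"Deleted the datastore file [{datastore}] {instance_uuid}")

def destroy_instance(vm_ref, datastore, instance_uuid):
    """Teardown order seen in the log: power off, unregister, delete contents."""
    power_off(vm_ref)
    unregister(vm_ref)
    delete_datastore_dir(datastore, instance_uuid)

if __name__ == "__main__":
    destroy_instance("vm-17c56c1c", "datastore2",
                     "17c56c1c-9992-4559-ad23-c68909ae6792")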
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.528352] env[61978]: DEBUG oslo_vmware.api [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395787, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.586842] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395783, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.597428] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance de8abe58-e0c2-4eaf-b3a6-7106e0861080 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1248.597428] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1248.597428] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3072MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1248.642263] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "interface-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-45cdb631-5f61-4991-973e-3ba5b5ff0820" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1248.642382] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "interface-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-45cdb631-5f61-4991-973e-3ba5b5ff0820" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1248.811450] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be93ab4-f5fd-4eda-8845-e96f4485c01d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.820672] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-508ed88b-1689-4cf3-b730-52ae982c8a62 {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.854247] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f2fd425-57e9-4da9-8edf-199381e4cc33 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.865418] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ddc8ae-5c53-4da2-a900-c57616f78fc8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.880470] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1248.915416] env[61978]: DEBUG oslo_concurrency.lockutils [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.024256] env[61978]: DEBUG oslo_vmware.api [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Task: {'id': task-1395787, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.216107} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.024977] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1249.025208] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1249.025401] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1249.025596] env[61978]: INFO nova.compute.manager [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1249.025861] env[61978]: DEBUG oslo.service.loopingcall [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
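The usable capacity behind the "Total usable vcpus: 48" and "Final resource view" entries a little above comes from the inventory reported earlier in this stretch (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, and so on). Placement treats (total - reserved) * allocation_ratio as the effective capacity per resource class; the few lines below apply that formula to the inventory dict copied from the earlier report, as a worked example rather than the scheduler's actual code.

# Inventory as reported for provider 44209228-3464-48ae-bc40-83eccd44b0cf above.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def effective_capacity(inv):
    # (total - reserved) * allocation_ratio is the amount that can be
    # allocated against each resource class.
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()}

if __name__ == "__main__":
    for rc, cap in effective_capacity(inventory).items():
        print(f"{rc}: {cap:g}")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400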
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1249.026067] env[61978]: DEBUG nova.compute.manager [-] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1249.026171] env[61978]: DEBUG nova.network.neutron [-] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1249.030733] env[61978]: DEBUG oslo_vmware.api [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Task: {'id': task-1395788, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.25934} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.031276] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1249.031458] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1249.031642] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1249.031814] env[61978]: INFO nova.compute.manager [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1249.032056] env[61978]: DEBUG oslo.service.loopingcall [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1249.032266] env[61978]: DEBUG nova.compute.manager [-] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1249.032360] env[61978]: DEBUG nova.network.neutron [-] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1249.062164] env[61978]: DEBUG nova.network.neutron [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Updated VIF entry in instance network info cache for port 76c38b35-52ad-477c-9dfd-e1c64c8a7889. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1249.062583] env[61978]: DEBUG nova.network.neutron [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Updating instance_info_cache with network_info: [{"id": "76c38b35-52ad-477c-9dfd-e1c64c8a7889", "address": "fa:16:3e:9f:0a:d0", "network": {"id": "61175c4b-0b5d-4186-97ac-7a615b95ef28", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-341600036-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d4d29d9b6a74b4887684c7b310280b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76c38b35-52", "ovs_interfaceid": "76c38b35-52ad-477c-9dfd-e1c64c8a7889", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.065485] env[61978]: DEBUG nova.compute.manager [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1249.096698] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395783, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514311} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.097056] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 03b08977-4b20-4bac-b48b-06ba5df4e579/03b08977-4b20-4bac-b48b-06ba5df4e579.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1249.097321] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1249.097884] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bcb55668-1cf9-4ab1-80ed-d41abc5d9502 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.103568] env[61978]: DEBUG nova.virt.hardware [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1249.103821] env[61978]: DEBUG nova.virt.hardware [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1249.104616] env[61978]: DEBUG nova.virt.hardware [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1249.104616] env[61978]: DEBUG nova.virt.hardware [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1249.104616] env[61978]: DEBUG nova.virt.hardware [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 
tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1249.104794] env[61978]: DEBUG nova.virt.hardware [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1249.104987] env[61978]: DEBUG nova.virt.hardware [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1249.105885] env[61978]: DEBUG nova.virt.hardware [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1249.105885] env[61978]: DEBUG nova.virt.hardware [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1249.105885] env[61978]: DEBUG nova.virt.hardware [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1249.105885] env[61978]: DEBUG nova.virt.hardware [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1249.106913] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf8f092-33a8-4789-ac76-2adb252915c8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.113062] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1249.113062] env[61978]: value = "task-1395789" [ 1249.113062] env[61978]: _type = "Task" [ 1249.113062] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.125480] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-291802be-fd3d-4388-bf19-8c9e1bad1ee2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.134278] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395789, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.147786] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1249.147976] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.150522] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e04a9c-5898-4c3b-b84b-4fd5d1a569ab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.170396] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a69f7d-a27d-4069-b6be-740ab39f0cd2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.203121] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Reconfiguring VM to detach interface {{(pid=61978) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1249.204070] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9831d00-a564-455c-9554-29381cb153b8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.223068] env[61978]: DEBUG oslo_vmware.api [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1249.223068] env[61978]: value = "task-1395790" [ 1249.223068] env[61978]: _type = "Task" [ 1249.223068] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.236566] env[61978]: DEBUG oslo_vmware.api [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395790, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.269225] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "refresh_cache-d3c82821-0617-4de6-8109-813a67910ed1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1249.269505] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired lock "refresh_cache-d3c82821-0617-4de6-8109-813a67910ed1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.269711] env[61978]: DEBUG nova.network.neutron [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1249.363386] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "b76dd94e-c14b-48d4-bb7f-020313412ca2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.363386] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "b76dd94e-c14b-48d4-bb7f-020313412ca2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1249.363386] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "b76dd94e-c14b-48d4-bb7f-020313412ca2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.363386] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "b76dd94e-c14b-48d4-bb7f-020313412ca2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1249.363386] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "b76dd94e-c14b-48d4-bb7f-020313412ca2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1249.366264] env[61978]: INFO nova.compute.manager [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Terminating instance [ 1249.371446] env[61978]: DEBUG nova.compute.manager [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1249.371446] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1249.371446] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71cb54ea-a782-4301-a78e-75cad66d974c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.378302] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1249.378696] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd189d8e-4494-4ef0-95f6-4623ddbae07d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.383024] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1249.389456] env[61978]: DEBUG oslo_vmware.api [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1249.389456] env[61978]: value = "task-1395791" [ 1249.389456] env[61978]: _type = "Task" [ 1249.389456] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.402156] env[61978]: DEBUG oslo_vmware.api [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395791, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.525257] env[61978]: DEBUG nova.compute.manager [req-5139b41c-1639-4a9f-b102-33f6fa65a9c0 req-d64117fe-b574-419f-9942-e295f9fadf21 service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Received event network-vif-deleted-47785e4d-5976-42da-b954-01d1e5ec6d75 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1249.525257] env[61978]: INFO nova.compute.manager [req-5139b41c-1639-4a9f-b102-33f6fa65a9c0 req-d64117fe-b574-419f-9942-e295f9fadf21 service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Neutron deleted interface 47785e4d-5976-42da-b954-01d1e5ec6d75; detaching it from the instance and deleting it from the info cache [ 1249.525257] env[61978]: DEBUG nova.network.neutron [req-5139b41c-1639-4a9f-b102-33f6fa65a9c0 req-d64117fe-b574-419f-9942-e295f9fadf21 service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.568979] env[61978]: DEBUG oslo_concurrency.lockutils [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] Releasing lock "refresh_cache-03b08977-4b20-4bac-b48b-06ba5df4e579" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1249.568979] env[61978]: DEBUG nova.compute.manager [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Received event network-changed-47785e4d-5976-42da-b954-01d1e5ec6d75 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1249.568979] env[61978]: DEBUG nova.compute.manager [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Refreshing instance network info cache due to event network-changed-47785e4d-5976-42da-b954-01d1e5ec6d75. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1249.568979] env[61978]: DEBUG oslo_concurrency.lockutils [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] Acquiring lock "refresh_cache-27713bbd-1234-44ae-8520-78d85baaae12" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1249.568979] env[61978]: DEBUG oslo_concurrency.lockutils [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] Acquired lock "refresh_cache-27713bbd-1234-44ae-8520-78d85baaae12" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.569180] env[61978]: DEBUG nova.network.neutron [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Refreshing network info cache for port 47785e4d-5976-42da-b954-01d1e5ec6d75 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1249.595818] env[61978]: DEBUG nova.compute.manager [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Stashing vm_state: active {{(pid=61978) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1249.617657] env[61978]: DEBUG oslo_concurrency.lockutils [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "e9b70b36-d0d8-430e-a5e7-588d3c75d7ff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.617959] env[61978]: DEBUG oslo_concurrency.lockutils [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "e9b70b36-d0d8-430e-a5e7-588d3c75d7ff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1249.618028] env[61978]: DEBUG oslo_concurrency.lockutils [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "e9b70b36-d0d8-430e-a5e7-588d3c75d7ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.618237] env[61978]: DEBUG oslo_concurrency.lockutils [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "e9b70b36-d0d8-430e-a5e7-588d3c75d7ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1249.618421] env[61978]: DEBUG oslo_concurrency.lockutils [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock 
"e9b70b36-d0d8-430e-a5e7-588d3c75d7ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1249.623716] env[61978]: INFO nova.compute.manager [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Terminating instance [ 1249.625704] env[61978]: DEBUG nova.compute.manager [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1249.625906] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1249.627135] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-227337df-dff8-4a61-8123-bf7571acbef3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.633210] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395789, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.52001} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.633938] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1249.634963] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a57896c-a0d4-4aea-bc3e-2b0a30a93895 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.639591] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1249.640280] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-873decb0-5ebf-4ca2-9755-1db71b828a47 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.663802] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] 03b08977-4b20-4bac-b48b-06ba5df4e579/03b08977-4b20-4bac-b48b-06ba5df4e579.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1249.665222] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b4b2167-51df-45ee-8547-0a3f17420c16 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.684204] env[61978]: DEBUG oslo_vmware.api [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1249.684204] env[61978]: value = "task-1395792" [ 1249.684204] env[61978]: _type = "Task" [ 1249.684204] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.699999] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1249.699999] env[61978]: value = "task-1395793" [ 1249.699999] env[61978]: _type = "Task" [ 1249.699999] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.700610] env[61978]: DEBUG oslo_vmware.api [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395792, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.709101] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395793, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.733454] env[61978]: DEBUG oslo_vmware.api [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395790, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.832650] env[61978]: DEBUG nova.network.neutron [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Successfully updated port: 41133564-b2c5-468a-aafc-5e11f8388a94 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1249.891519] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1249.891822] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.863s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1249.892201] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.663s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1249.906022] env[61978]: DEBUG oslo_vmware.api [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395791, 'name': PowerOffVM_Task, 'duration_secs': 0.363553} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.906022] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1249.906022] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1249.906022] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ced8909e-0962-44e3-affe-416c562a7430 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.972162] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1249.972442] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1249.972643] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Deleting the datastore file [datastore1] b76dd94e-c14b-48d4-bb7f-020313412ca2 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1249.972916] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-465c439e-3b1a-492c-b55f-c3c5ba1bf70b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.979827] env[61978]: DEBUG oslo_vmware.api [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1249.979827] env[61978]: value = "task-1395795" [ 1249.979827] env[61978]: _type = "Task" [ 1249.979827] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.988675] env[61978]: DEBUG oslo_vmware.api [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395795, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.998176] env[61978]: DEBUG nova.network.neutron [-] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.003225] env[61978]: DEBUG nova.network.neutron [-] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.027870] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-90602431-8297-486e-9913-0f1a197332f3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.040093] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152e197e-84df-4adc-828f-228aa28210d0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.057899] env[61978]: DEBUG nova.network.neutron [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updating instance_info_cache with network_info: [{"id": "d2d39b09-4acd-4f24-aa07-31e86f78f134", "address": "fa:16:3e:3b:95:21", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2d39b09-4a", "ovs_interfaceid": "d2d39b09-4acd-4f24-aa07-31e86f78f134", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.083718] env[61978]: DEBUG nova.compute.manager [req-5139b41c-1639-4a9f-b102-33f6fa65a9c0 req-d64117fe-b574-419f-9942-e295f9fadf21 service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Detach interface failed, port_id=47785e4d-5976-42da-b954-01d1e5ec6d75, reason: Instance 27713bbd-1234-44ae-8520-78d85baaae12 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1250.099217] env[61978]: DEBUG nova.network.neutron [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1250.124661] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1250.199481] env[61978]: DEBUG oslo_vmware.api [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395792, 'name': PowerOffVM_Task, 'duration_secs': 0.253238} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.199599] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1250.202020] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1250.202020] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc42837f-a8de-425a-9fd8-97818ec24c0e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.209349] env[61978]: DEBUG nova.network.neutron [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.215596] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395793, 'name': ReconfigVM_Task, 'duration_secs': 0.355418} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.216172] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Reconfigured VM instance instance-0000005e to attach disk [datastore2] 03b08977-4b20-4bac-b48b-06ba5df4e579/03b08977-4b20-4bac-b48b-06ba5df4e579.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1250.216898] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-59e9fe49-82b6-40a6-bdf4-ed76a67e5a5d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.229112] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1250.229112] env[61978]: value = "task-1395797" [ 1250.229112] env[61978]: _type = "Task" [ 1250.229112] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.236722] env[61978]: DEBUG oslo_vmware.api [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395790, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.242975] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395797, 'name': Rename_Task} progress is 10%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.278426] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1250.280037] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1250.280037] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Deleting the datastore file [datastore1] e9b70b36-d0d8-430e-a5e7-588d3c75d7ff {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1250.280037] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd188e8a-6f40-4515-91e9-df22d6307d98 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.290210] env[61978]: DEBUG oslo_vmware.api [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for the task: (returnval){ [ 1250.290210] env[61978]: value = "task-1395798" [ 1250.290210] env[61978]: _type = "Task" [ 1250.290210] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.297888] env[61978]: DEBUG oslo_vmware.api [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395798, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.335133] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Acquiring lock "refresh_cache-f9b57cf4-f2e4-4d2a-9bd4-74952d46876d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1250.335310] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Acquired lock "refresh_cache-f9b57cf4-f2e4-4d2a-9bd4-74952d46876d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.335430] env[61978]: DEBUG nova.network.neutron [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1250.404158] env[61978]: INFO nova.compute.claims [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1250.491707] env[61978]: DEBUG oslo_vmware.api [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395795, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160076} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.491707] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1250.491707] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1250.491707] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1250.491707] env[61978]: INFO nova.compute.manager [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 1250.491707] env[61978]: DEBUG oslo.service.loopingcall [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1250.492057] env[61978]: DEBUG nova.compute.manager [-] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1250.492057] env[61978]: DEBUG nova.network.neutron [-] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1250.501423] env[61978]: INFO nova.compute.manager [-] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Took 1.47 seconds to deallocate network for instance. [ 1250.511023] env[61978]: INFO nova.compute.manager [-] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Took 1.48 seconds to deallocate network for instance. [ 1250.563521] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lock "refresh_cache-d3c82821-0617-4de6-8109-813a67910ed1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1250.717055] env[61978]: DEBUG oslo_concurrency.lockutils [req-5832680d-1b03-4a1c-80f1-7bb1343bc459 req-1a17d490-2bdb-45c9-8925-ea826726b03e service nova] Releasing lock "refresh_cache-27713bbd-1234-44ae-8520-78d85baaae12" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1250.736566] env[61978]: DEBUG oslo_vmware.api [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395790, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.741829] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395797, 'name': Rename_Task, 'duration_secs': 0.236411} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.742135] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1250.742391] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5be6ff81-eaed-4a5a-8d8a-aa271ac5294f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.749429] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1250.749429] env[61978]: value = "task-1395799" [ 1250.749429] env[61978]: _type = "Task" [ 1250.749429] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.760521] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395799, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.800863] env[61978]: DEBUG oslo_vmware.api [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Task: {'id': task-1395798, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138207} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.800863] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1250.801030] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1250.801221] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1250.801415] env[61978]: INFO nova.compute.manager [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Took 1.18 seconds to destroy the instance on the hypervisor. 
[ 1250.803105] env[61978]: DEBUG oslo.service.loopingcall [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1250.803105] env[61978]: DEBUG nova.compute.manager [-] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1250.803105] env[61978]: DEBUG nova.network.neutron [-] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1250.909095] env[61978]: INFO nova.compute.resource_tracker [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Updating resource usage from migration e6b37eeb-7505-4485-ba8d-f4b9a4c26958 [ 1250.931778] env[61978]: DEBUG nova.network.neutron [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1251.009881] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1251.018060] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1251.080101] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501dda9e-bfc0-45c7-905d-9929050603ae {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.091694] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e74fc8-8191-4bc0-87a4-a4fa6fd5052c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.196298] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7964d434-3efd-44e1-ad17-716deb530644 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.206721] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f17479-01e9-4259-bc0e-013f770fde2f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.244781] 
env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a837fd-b161-423c-9a5b-1d2f38b6047a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.262183] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e700cb3b-f34e-42a3-9a72-6ace5e75fb80 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.266690] env[61978]: DEBUG oslo_vmware.api [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395790, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.272519] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395799, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.281667] env[61978]: DEBUG nova.compute.provider_tree [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1251.475927] env[61978]: DEBUG nova.network.neutron [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Updating instance_info_cache with network_info: [{"id": "41133564-b2c5-468a-aafc-5e11f8388a94", "address": "fa:16:3e:03:6f:75", "network": {"id": "b1612066-ed83-4ce3-be8c-708863f0a685", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-806205852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93bebf4ffee4172ab32b91becad7a05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1d25020-c621-4388-ac1d-de55bfefbe50", "external-id": "nsx-vlan-transportzone-573", "segmentation_id": 573, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41133564-b2", "ovs_interfaceid": "41133564-b2c5-468a-aafc-5e11f8388a94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1251.635850] env[61978]: DEBUG nova.compute.manager [req-afe143dc-dbfa-4dd0-9078-7b305d194882 req-2b4e40f5-c323-41e8-8bfb-41af00ca662a service nova] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Received event network-vif-deleted-bba3eeec-259f-4ea3-b0f6-e509a29d33f4 {{(pid=61978) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1251.635850] env[61978]: DEBUG nova.compute.manager [req-afe143dc-dbfa-4dd0-9078-7b305d194882 req-2b4e40f5-c323-41e8-8bfb-41af00ca662a service nova] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Received event network-vif-plugged-41133564-b2c5-468a-aafc-5e11f8388a94 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1251.635850] env[61978]: DEBUG oslo_concurrency.lockutils [req-afe143dc-dbfa-4dd0-9078-7b305d194882 req-2b4e40f5-c323-41e8-8bfb-41af00ca662a service nova] Acquiring lock "f9b57cf4-f2e4-4d2a-9bd4-74952d46876d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1251.636261] env[61978]: DEBUG oslo_concurrency.lockutils [req-afe143dc-dbfa-4dd0-9078-7b305d194882 req-2b4e40f5-c323-41e8-8bfb-41af00ca662a service nova] Lock "f9b57cf4-f2e4-4d2a-9bd4-74952d46876d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1251.636610] env[61978]: DEBUG oslo_concurrency.lockutils [req-afe143dc-dbfa-4dd0-9078-7b305d194882 req-2b4e40f5-c323-41e8-8bfb-41af00ca662a service nova] Lock "f9b57cf4-f2e4-4d2a-9bd4-74952d46876d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1251.636917] env[61978]: DEBUG nova.compute.manager [req-afe143dc-dbfa-4dd0-9078-7b305d194882 req-2b4e40f5-c323-41e8-8bfb-41af00ca662a service nova] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] No waiting events found dispatching network-vif-plugged-41133564-b2c5-468a-aafc-5e11f8388a94 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1251.637783] env[61978]: WARNING nova.compute.manager [req-afe143dc-dbfa-4dd0-9078-7b305d194882 req-2b4e40f5-c323-41e8-8bfb-41af00ca662a service nova] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Received unexpected event network-vif-plugged-41133564-b2c5-468a-aafc-5e11f8388a94 for instance with vm_state building and task_state spawning. [ 1251.638129] env[61978]: DEBUG nova.compute.manager [req-afe143dc-dbfa-4dd0-9078-7b305d194882 req-2b4e40f5-c323-41e8-8bfb-41af00ca662a service nova] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Received event network-changed-41133564-b2c5-468a-aafc-5e11f8388a94 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1251.638431] env[61978]: DEBUG nova.compute.manager [req-afe143dc-dbfa-4dd0-9078-7b305d194882 req-2b4e40f5-c323-41e8-8bfb-41af00ca662a service nova] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Refreshing instance network info cache due to event network-changed-41133564-b2c5-468a-aafc-5e11f8388a94. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1251.639044] env[61978]: DEBUG oslo_concurrency.lockutils [req-afe143dc-dbfa-4dd0-9078-7b305d194882 req-2b4e40f5-c323-41e8-8bfb-41af00ca662a service nova] Acquiring lock "refresh_cache-f9b57cf4-f2e4-4d2a-9bd4-74952d46876d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1251.758070] env[61978]: DEBUG oslo_vmware.api [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395790, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.769117] env[61978]: DEBUG oslo_vmware.api [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395799, 'name': PowerOnVM_Task, 'duration_secs': 0.710174} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.769488] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1251.769848] env[61978]: INFO nova.compute.manager [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Took 8.68 seconds to spawn the instance on the hypervisor. 
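The recurring "Acquiring lock ...", "Lock ... acquired by ... waited", and "released by ... held" lines in this log are emitted by oslo_concurrency.lockutils. A minimal sketch of the two usual forms follows; the lock names and the decorated function are illustrative only.

    from oslo_concurrency import lockutils

    # Decorator form: only one thread in this process runs the body at a time.
    # Entry and exit produce the DEBUG "acquired by" / "released by" lines,
    # including how long the caller waited and how long the lock was held.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # resource-tracker bookkeeping would go here

    # Context-manager form, as used around per-instance cache refreshes;
    # the lock name here is a placeholder.
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        pass  # refresh the instance network info cache while holding the lock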
[ 1251.770123] env[61978]: DEBUG nova.compute.manager [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1251.771988] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07589c0e-e6f8-4e83-9420-83e487b8dec0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.785962] env[61978]: DEBUG nova.scheduler.client.report [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1251.817034] env[61978]: DEBUG nova.network.neutron [-] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1251.979784] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Releasing lock "refresh_cache-f9b57cf4-f2e4-4d2a-9bd4-74952d46876d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1251.980231] env[61978]: DEBUG nova.compute.manager [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Instance network_info: |[{"id": "41133564-b2c5-468a-aafc-5e11f8388a94", "address": "fa:16:3e:03:6f:75", "network": {"id": "b1612066-ed83-4ce3-be8c-708863f0a685", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-806205852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93bebf4ffee4172ab32b91becad7a05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1d25020-c621-4388-ac1d-de55bfefbe50", "external-id": "nsx-vlan-transportzone-573", "segmentation_id": 573, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41133564-b2", "ovs_interfaceid": "41133564-b2c5-468a-aafc-5e11f8388a94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1251.980610] env[61978]: DEBUG oslo_concurrency.lockutils [req-afe143dc-dbfa-4dd0-9078-7b305d194882 req-2b4e40f5-c323-41e8-8bfb-41af00ca662a service nova] Acquired lock "refresh_cache-f9b57cf4-f2e4-4d2a-9bd4-74952d46876d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.980952] env[61978]: DEBUG nova.network.neutron [req-afe143dc-dbfa-4dd0-9078-7b305d194882 req-2b4e40f5-c323-41e8-8bfb-41af00ca662a service nova] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Refreshing network info cache for port 41133564-b2c5-468a-aafc-5e11f8388a94 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1251.982589] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:6f:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e1d25020-c621-4388-ac1d-de55bfefbe50', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '41133564-b2c5-468a-aafc-5e11f8388a94', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1251.997237] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Creating folder: Project (f93bebf4ffee4172ab32b91becad7a05). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1252.001490] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-de6845df-08e1-4ec8-8836-62aac63bad6f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.014246] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Created folder: Project (f93bebf4ffee4172ab32b91becad7a05) in parent group-v295764. [ 1252.014422] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Creating folder: Instances. Parent ref: group-v296018. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1252.014673] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a022e9d2-dd35-4caa-889a-5ef13fb2f7b2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.025895] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Created folder: Instances in parent group-v296018. [ 1252.026248] env[61978]: DEBUG oslo.service.loopingcall [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1252.026508] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1252.026797] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02245d13-7d48-4f07-9a7f-2c84aec5fb01 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.047622] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1252.047622] env[61978]: value = "task-1395803" [ 1252.047622] env[61978]: _type = "Task" [ 1252.047622] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.055816] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395803, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.072380] env[61978]: DEBUG nova.network.neutron [-] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1252.221983] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd1d4ed-c905-4bd8-931a-d8c9ed3b109e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.248881] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf46a170-45b6-4787-8b6f-a5fdc1712b52 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.256422] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updating instance 'd3c82821-0617-4de6-8109-813a67910ed1' progress to 83 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1252.262793] env[61978]: DEBUG oslo_vmware.api [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395790, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.295595] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.400s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1252.295595] env[61978]: INFO nova.compute.manager [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Migrating [ 1252.303478] env[61978]: INFO nova.compute.manager [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Took 29.34 seconds to build instance. [ 1252.307427] env[61978]: DEBUG oslo_concurrency.lockutils [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.392s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.312084] env[61978]: INFO nova.compute.claims [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1252.324627] env[61978]: INFO nova.compute.manager [-] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Took 1.83 seconds to deallocate network for instance. [ 1252.415425] env[61978]: DEBUG nova.network.neutron [req-afe143dc-dbfa-4dd0-9078-7b305d194882 req-2b4e40f5-c323-41e8-8bfb-41af00ca662a service nova] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Updated VIF entry in instance network info cache for port 41133564-b2c5-468a-aafc-5e11f8388a94. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1252.415879] env[61978]: DEBUG nova.network.neutron [req-afe143dc-dbfa-4dd0-9078-7b305d194882 req-2b4e40f5-c323-41e8-8bfb-41af00ca662a service nova] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Updating instance_info_cache with network_info: [{"id": "41133564-b2c5-468a-aafc-5e11f8388a94", "address": "fa:16:3e:03:6f:75", "network": {"id": "b1612066-ed83-4ce3-be8c-708863f0a685", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-806205852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93bebf4ffee4172ab32b91becad7a05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1d25020-c621-4388-ac1d-de55bfefbe50", "external-id": "nsx-vlan-transportzone-573", "segmentation_id": 573, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41133564-b2", "ovs_interfaceid": "41133564-b2c5-468a-aafc-5e11f8388a94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1252.557020] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395803, 'name': CreateVM_Task, 'duration_secs': 0.375772} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.557214] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1252.557920] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1252.558111] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1252.558453] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1252.559129] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d31c679d-329a-4d2b-a77b-558448a1de67 {{(pid=61978) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.563351] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Waiting for the task: (returnval){ [ 1252.563351] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52df7f3e-979a-6710-b56e-24f94a1d4ae6" [ 1252.563351] env[61978]: _type = "Task" [ 1252.563351] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.571145] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52df7f3e-979a-6710-b56e-24f94a1d4ae6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.575813] env[61978]: INFO nova.compute.manager [-] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Took 1.77 seconds to deallocate network for instance. [ 1252.755360] env[61978]: DEBUG oslo_vmware.api [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395790, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.767668] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1252.767992] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-75c8aec0-6b9a-4cea-95ee-d710f1587cff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.776128] env[61978]: DEBUG oslo_vmware.api [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1252.776128] env[61978]: value = "task-1395804" [ 1252.776128] env[61978]: _type = "Task" [ 1252.776128] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.784577] env[61978]: DEBUG oslo_vmware.api [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395804, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.818612] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55af4634-c12a-4dae-ad91-539a4187a4ef tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "03b08977-4b20-4bac-b48b-06ba5df4e579" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.867s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1252.827452] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1252.827885] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1252.828209] env[61978]: DEBUG nova.network.neutron [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1252.830366] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.919043] env[61978]: DEBUG oslo_concurrency.lockutils [req-afe143dc-dbfa-4dd0-9078-7b305d194882 req-2b4e40f5-c323-41e8-8bfb-41af00ca662a service nova] Releasing lock "refresh_cache-f9b57cf4-f2e4-4d2a-9bd4-74952d46876d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1252.919191] env[61978]: DEBUG nova.compute.manager [req-afe143dc-dbfa-4dd0-9078-7b305d194882 req-2b4e40f5-c323-41e8-8bfb-41af00ca662a service nova] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Received event network-vif-deleted-58e26b61-334b-4383-b787-c9cb140c549e {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1252.919402] env[61978]: INFO nova.compute.manager [req-afe143dc-dbfa-4dd0-9078-7b305d194882 req-2b4e40f5-c323-41e8-8bfb-41af00ca662a service nova] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Neutron deleted interface 58e26b61-334b-4383-b787-c9cb140c549e; detaching it from the instance and deleting it from the info cache [ 1252.919578] env[61978]: DEBUG nova.network.neutron [req-afe143dc-dbfa-4dd0-9078-7b305d194882 req-2b4e40f5-c323-41e8-8bfb-41af00ca662a service nova] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1253.075085] env[61978]: DEBUG oslo_vmware.api 
[None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52df7f3e-979a-6710-b56e-24f94a1d4ae6, 'name': SearchDatastore_Task, 'duration_secs': 0.008557} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.075468] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1253.075854] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1253.076158] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1253.076323] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.076617] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1253.076945] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-977cf8b5-0208-48c9-b52a-4e6dc08a6532 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.081546] env[61978]: DEBUG oslo_concurrency.lockutils [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1253.085658] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1253.085907] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1253.086704] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83de882c-39fd-49e4-86cb-3efbe34d17e0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.092020] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Waiting for the task: (returnval){ [ 1253.092020] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5295c778-c337-cbf8-cec9-1a1a72d2d4a3" [ 1253.092020] env[61978]: _type = "Task" [ 1253.092020] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.101834] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5295c778-c337-cbf8-cec9-1a1a72d2d4a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.114686] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "03b08977-4b20-4bac-b48b-06ba5df4e579" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1253.115071] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "03b08977-4b20-4bac-b48b-06ba5df4e579" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1253.115294] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "03b08977-4b20-4bac-b48b-06ba5df4e579-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1253.115488] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "03b08977-4b20-4bac-b48b-06ba5df4e579-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1253.115671] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "03b08977-4b20-4bac-b48b-06ba5df4e579-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1253.119505] env[61978]: INFO nova.compute.manager [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Terminating instance [ 1253.121112] env[61978]: DEBUG nova.compute.manager [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1253.121532] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1253.122166] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f0efca-4b76-4411-ad89-231d179550ad {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.129877] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1253.130197] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2929cc04-d86d-4da5-82d3-2fdfd1d6a16c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.137247] env[61978]: DEBUG oslo_vmware.api [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1253.137247] env[61978]: value = "task-1395806" [ 1253.137247] env[61978]: _type = "Task" [ 1253.137247] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.144799] env[61978]: DEBUG oslo_vmware.api [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395806, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.254080] env[61978]: DEBUG oslo_vmware.api [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395790, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.286329] env[61978]: DEBUG oslo_vmware.api [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395804, 'name': PowerOnVM_Task, 'duration_secs': 0.476064} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.286703] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1253.286805] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0b83c4e7-90a0-4dfa-935a-0d10ad578e58 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updating instance 'd3c82821-0617-4de6-8109-813a67910ed1' progress to 100 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1253.424678] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc215b15-991a-4171-ae08-245c3ae18a28 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.436162] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1821693f-7e70-40d0-9120-d33b1130fbd5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.475071] env[61978]: DEBUG nova.compute.manager [req-afe143dc-dbfa-4dd0-9078-7b305d194882 req-2b4e40f5-c323-41e8-8bfb-41af00ca662a service nova] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Detach interface failed, port_id=58e26b61-334b-4383-b787-c9cb140c549e, reason: Instance b76dd94e-c14b-48d4-bb7f-020313412ca2 could not be found. 
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1253.594764] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0024504d-62fb-479e-acff-10259442ab54 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.610249] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ceaabf-2696-42dd-9975-990c41324e72 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.613297] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5295c778-c337-cbf8-cec9-1a1a72d2d4a3, 'name': SearchDatastore_Task, 'duration_secs': 0.009456} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.614258] env[61978]: DEBUG nova.network.neutron [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Updating instance_info_cache with network_info: [{"id": "7417d7e9-723d-408d-bfa4-e583af757e79", "address": "fa:16:3e:e6:e3:c6", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7417d7e9-72", "ovs_interfaceid": "7417d7e9-723d-408d-bfa4-e583af757e79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1253.616233] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8de2fab4-2eb8-4c83-a5b7-a5cac7877c3e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.647317] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4bb430e-b7c8-4f2f-bf9f-24da6097074f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.651635] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Waiting for the 
task: (returnval){ [ 1253.651635] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]527eaede-e99f-291f-72c7-d33bf8d2c240" [ 1253.651635] env[61978]: _type = "Task" [ 1253.651635] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.659606] env[61978]: DEBUG oslo_vmware.api [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395806, 'name': PowerOffVM_Task, 'duration_secs': 0.193032} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.661045] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea2b639e-9f04-4b46-8441-425b046c46cc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.664904] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1253.665103] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1253.669332] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-55d52c77-58a7-41e0-9104-555d09ed5189 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.671699] env[61978]: DEBUG nova.compute.manager [req-dcfa2a6a-2df0-44ed-812c-a9c898b079e8 req-e8fea9f7-43fc-4f0a-9f8e-d1dcaa13c5d5 service nova] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Received event network-vif-deleted-5db0518b-5552-40e5-80e3-e15e330660eb {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1253.672272] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]527eaede-e99f-291f-72c7-d33bf8d2c240, 'name': SearchDatastore_Task, 'duration_secs': 0.010152} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.672824] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1253.673089] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] f9b57cf4-f2e4-4d2a-9bd4-74952d46876d/f9b57cf4-f2e4-4d2a-9bd4-74952d46876d.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1253.673583] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-92b8c782-f580-4ff2-b3c8-955da5c24782 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.683127] env[61978]: DEBUG nova.compute.provider_tree [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1253.688453] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Waiting for the task: (returnval){ [ 1253.688453] env[61978]: value = "task-1395808" [ 1253.688453] env[61978]: _type = "Task" [ 1253.688453] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.699810] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Task: {'id': task-1395808, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.736070] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1253.737080] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1253.737080] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Deleting the datastore file [datastore2] 03b08977-4b20-4bac-b48b-06ba5df4e579 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1253.737080] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d8904c16-cbc0-4da3-b375-92a50cc4030f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.744117] env[61978]: DEBUG oslo_vmware.api [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for the task: (returnval){ [ 1253.744117] env[61978]: value = "task-1395809" [ 1253.744117] env[61978]: _type = "Task" [ 1253.744117] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.560980] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1254.560980] env[61978]: DEBUG nova.scheduler.client.report [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1254.578453] env[61978]: DEBUG oslo_vmware.api [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395790, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.578746] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Task: {'id': task-1395808, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.641253} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.581307] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] f9b57cf4-f2e4-4d2a-9bd4-74952d46876d/f9b57cf4-f2e4-4d2a-9bd4-74952d46876d.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1254.581539] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1254.581820] env[61978]: DEBUG oslo_vmware.api [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Task: {'id': task-1395809, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.22487} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.582625] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-66c3cd24-2b4d-40e5-aa39-6b3c7f5a8471 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.585165] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1254.585165] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1254.585165] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1254.585165] env[61978]: INFO nova.compute.manager [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Took 1.46 seconds to destroy the instance on the hypervisor. [ 1254.585375] env[61978]: DEBUG oslo.service.loopingcall [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1254.586094] env[61978]: DEBUG nova.compute.manager [-] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1254.586192] env[61978]: DEBUG nova.network.neutron [-] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1254.592782] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Waiting for the task: (returnval){ [ 1254.592782] env[61978]: value = "task-1395810" [ 1254.592782] env[61978]: _type = "Task" [ 1254.592782] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.600916] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Task: {'id': task-1395810, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.063898] env[61978]: DEBUG oslo_vmware.api [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395790, 'name': ReconfigVM_Task, 'duration_secs': 5.776537} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.064229] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1255.064500] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Reconfigured VM to detach interface {{(pid=61978) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1255.071286] env[61978]: DEBUG oslo_concurrency.lockutils [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.764s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1255.072930] env[61978]: DEBUG nova.compute.manager [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1255.074423] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 4.950s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.103740] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Task: {'id': task-1395810, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062962} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.104086] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1255.105016] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a1b64e4-6332-439f-bae2-6b8306204c94 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.133281] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] f9b57cf4-f2e4-4d2a-9bd4-74952d46876d/f9b57cf4-f2e4-4d2a-9bd4-74952d46876d.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1255.133918] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90640773-c4b8-4d82-84dc-0e35b2a39de6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.157895] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Waiting for the task: (returnval){ [ 1255.157895] env[61978]: value = "task-1395812" [ 1255.157895] env[61978]: _type = "Task" [ 1255.157895] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.167830] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Task: {'id': task-1395812, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.494167] env[61978]: DEBUG nova.network.neutron [-] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.583641] env[61978]: INFO nova.compute.claims [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1255.593132] env[61978]: DEBUG nova.compute.utils [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1255.598440] env[61978]: DEBUG nova.compute.manager [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Not allocating networking since 'none' was specified. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1255.669926] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Task: {'id': task-1395812, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.697476] env[61978]: DEBUG nova.compute.manager [req-51479f69-fe15-4d51-a136-826e66d789f6 req-5547c62d-a5cf-47c1-a3d5-20dc10365a35 service nova] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Received event network-vif-deleted-76c38b35-52ad-477c-9dfd-e1c64c8a7889 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1255.697476] env[61978]: DEBUG nova.compute.manager [req-51479f69-fe15-4d51-a136-826e66d789f6 req-5547c62d-a5cf-47c1-a3d5-20dc10365a35 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Received event network-vif-deleted-45cdb631-5f61-4991-973e-3ba5b5ff0820 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1255.697476] env[61978]: INFO nova.compute.manager [req-51479f69-fe15-4d51-a136-826e66d789f6 req-5547c62d-a5cf-47c1-a3d5-20dc10365a35 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Neutron deleted interface 45cdb631-5f61-4991-973e-3ba5b5ff0820; detaching it from the instance and deleting it from the info cache [ 1255.698200] env[61978]: DEBUG nova.network.neutron [req-51479f69-fe15-4d51-a136-826e66d789f6 req-5547c62d-a5cf-47c1-a3d5-20dc10365a35 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Updating instance_info_cache with network_info: [{"id": "3c5e24a1-8ef7-45a5-a39a-4ce790adc338", "address": "fa:16:3e:14:c6:36", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, 
"meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c5e24a1-8e", "ovs_interfaceid": "3c5e24a1-8ef7-45a5-a39a-4ce790adc338", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "759bb051-5bd1-42fb-896d-ced73e9f67a1", "address": "fa:16:3e:b1:58:6f", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap759bb051-5b", "ovs_interfaceid": "759bb051-5bd1-42fb-896d-ced73e9f67a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.997191] env[61978]: INFO nova.compute.manager [-] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Took 1.41 seconds to deallocate network for instance. [ 1256.100483] env[61978]: INFO nova.compute.resource_tracker [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Updating resource usage from migration 31398f15-05ee-49eb-8ce0-4d60c312ca83 [ 1256.103793] env[61978]: DEBUG nova.compute.manager [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1256.115315] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84cb23e5-3f9d-4e96-a4ba-5017e0b5b07b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.143992] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Updating instance '59f32dd0-1faa-4059-9ef3-b177e8f4fa4c' progress to 0 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1256.172070] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Task: {'id': task-1395812, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.201038] env[61978]: DEBUG oslo_concurrency.lockutils [req-51479f69-fe15-4d51-a136-826e66d789f6 req-5547c62d-a5cf-47c1-a3d5-20dc10365a35 service nova] Acquiring lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1256.201369] env[61978]: DEBUG oslo_concurrency.lockutils [req-51479f69-fe15-4d51-a136-826e66d789f6 req-5547c62d-a5cf-47c1-a3d5-20dc10365a35 service nova] Acquired lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.203939] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76286b82-c904-45c1-adb0-105dae0d2920 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.240213] env[61978]: DEBUG oslo_concurrency.lockutils [req-51479f69-fe15-4d51-a136-826e66d789f6 req-5547c62d-a5cf-47c1-a3d5-20dc10365a35 service nova] Releasing lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1256.240865] env[61978]: WARNING nova.compute.manager [req-51479f69-fe15-4d51-a136-826e66d789f6 req-5547c62d-a5cf-47c1-a3d5-20dc10365a35 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Detach interface failed, port_id=45cdb631-5f61-4991-973e-3ba5b5ff0820, reason: No device with interface-id 45cdb631-5f61-4991-973e-3ba5b5ff0820 exists on VM: nova.exception.NotFound: No device with interface-id 45cdb631-5f61-4991-973e-3ba5b5ff0820 exists on VM [ 1256.438603] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce22a949-9721-4c3a-941b-56fb7c13b626 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.446718] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c8050f-c6de-4a9f-a403-da54b6bd0c58 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.484787] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e5e46f-3076-469f-bd7b-982de6d33540 {{(pid=61978) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.493816] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838005fc-aa13-47d3-8aab-e9f8895cea01 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.512649] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1256.513268] env[61978]: DEBUG nova.compute.provider_tree [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1256.545672] env[61978]: DEBUG nova.network.neutron [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Port d2d39b09-4acd-4f24-aa07-31e86f78f134 binding to destination host cpu-1 is already ACTIVE {{(pid=61978) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1256.545949] env[61978]: DEBUG oslo_concurrency.lockutils [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "refresh_cache-d3c82821-0617-4de6-8109-813a67910ed1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1256.546171] env[61978]: DEBUG oslo_concurrency.lockutils [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired lock "refresh_cache-d3c82821-0617-4de6-8109-813a67910ed1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.546975] env[61978]: DEBUG nova.network.neutron [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1256.549211] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1256.549211] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.549211] env[61978]: DEBUG 
nova.network.neutron [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1256.653076] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1256.654124] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3d42a84-0640-434b-9f1b-e6ed547096fc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.665361] env[61978]: DEBUG oslo_vmware.api [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1256.665361] env[61978]: value = "task-1395813" [ 1256.665361] env[61978]: _type = "Task" [ 1256.665361] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.672663] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Task: {'id': task-1395812, 'name': ReconfigVM_Task, 'duration_secs': 1.104922} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.673809] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Reconfigured VM instance instance-0000005f to attach disk [datastore2] f9b57cf4-f2e4-4d2a-9bd4-74952d46876d/f9b57cf4-f2e4-4d2a-9bd4-74952d46876d.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1256.673959] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fbaf2f3d-50a0-44a1-8ccf-01fdd3ab8b84 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.678459] env[61978]: DEBUG oslo_vmware.api [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395813, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.683361] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Waiting for the task: (returnval){ [ 1256.683361] env[61978]: value = "task-1395814" [ 1256.683361] env[61978]: _type = "Task" [ 1256.683361] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.692813] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Task: {'id': task-1395814, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.003180] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1257.003484] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1257.003781] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1257.004076] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1257.004279] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1257.011290] env[61978]: INFO nova.compute.manager [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Terminating instance [ 1257.011290] env[61978]: DEBUG nova.compute.manager [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1257.011290] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1257.011994] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16c01a3-db62-4a75-8515-d5a3601608dc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.015825] env[61978]: DEBUG nova.scheduler.client.report [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1257.024102] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1257.024102] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5bd21437-4510-4350-8cfe-b74a71cdf449 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.028602] env[61978]: DEBUG oslo_vmware.api [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1257.028602] env[61978]: value = "task-1395815" [ 1257.028602] env[61978]: _type = "Task" [ 1257.028602] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.039356] env[61978]: DEBUG oslo_vmware.api [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395815, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.124199] env[61978]: DEBUG nova.compute.manager [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1257.148575] env[61978]: DEBUG nova.virt.hardware [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1257.148830] env[61978]: DEBUG nova.virt.hardware [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1257.149035] env[61978]: DEBUG nova.virt.hardware [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1257.149245] env[61978]: DEBUG nova.virt.hardware [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1257.149397] env[61978]: DEBUG nova.virt.hardware [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1257.149575] env[61978]: DEBUG nova.virt.hardware [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1257.149807] env[61978]: DEBUG nova.virt.hardware [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1257.149987] env[61978]: DEBUG nova.virt.hardware [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1257.150753] env[61978]: DEBUG nova.virt.hardware [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b 
tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1257.150869] env[61978]: DEBUG nova.virt.hardware [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1257.151085] env[61978]: DEBUG nova.virt.hardware [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1257.151984] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb367be8-c778-46b7-a2fe-6c10819669b9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.164241] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085434dc-0f32-420a-b24b-6a6668ac3e0f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.190366] env[61978]: DEBUG oslo_vmware.api [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395813, 'name': PowerOffVM_Task, 'duration_secs': 0.189004} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.190366] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Instance VIF info [] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1257.194915] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Creating folder: Project (7ee4d1146d794e1f80bdcaeef446faa2). Parent ref: group-v295764. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1257.198201] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1257.199367] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Updating instance '59f32dd0-1faa-4059-9ef3-b177e8f4fa4c' progress to 17 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1257.205962] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2116af76-2ea2-4019-85a8-aee2bba23f21 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.217046] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Task: {'id': task-1395814, 'name': Rename_Task, 'duration_secs': 0.182908} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.217046] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1257.217046] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c09d91b3-7b77-40c7-ac0e-8da94588231e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.221182] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Created folder: Project (7ee4d1146d794e1f80bdcaeef446faa2) in parent group-v295764. [ 1257.221476] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Creating folder: Instances. Parent ref: group-v296022. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1257.221782] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-905b59af-d8c1-42b4-83b5-c1bc1bafae2e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.227415] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Waiting for the task: (returnval){ [ 1257.227415] env[61978]: value = "task-1395818" [ 1257.227415] env[61978]: _type = "Task" [ 1257.227415] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.233253] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Created folder: Instances in parent group-v296022. [ 1257.233510] env[61978]: DEBUG oslo.service.loopingcall [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1257.234300] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1257.236051] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0bf77820-ac66-43f3-b4b0-0a0a2fb34c1b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.254178] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Task: {'id': task-1395818, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.259951] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1257.259951] env[61978]: value = "task-1395820" [ 1257.259951] env[61978]: _type = "Task" [ 1257.259951] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.273099] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395820, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.519534] env[61978]: INFO nova.network.neutron [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Port 759bb051-5bd1-42fb-896d-ced73e9f67a1 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1257.519998] env[61978]: DEBUG nova.network.neutron [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Updating instance_info_cache with network_info: [{"id": "3c5e24a1-8ef7-45a5-a39a-4ce790adc338", "address": "fa:16:3e:14:c6:36", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c5e24a1-8e", "ovs_interfaceid": "3c5e24a1-8ef7-45a5-a39a-4ce790adc338", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.522900] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.448s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1257.526774] env[61978]: INFO nova.compute.manager [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Migrating [ 1257.535119] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.525s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1257.535516] env[61978]: DEBUG nova.objects.instance [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Lazy-loading 'resources' on Instance uuid 27713bbd-1234-44ae-8520-78d85baaae12 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1257.542440] env[61978]: DEBUG nova.network.neutron [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updating instance_info_cache with network_info: [{"id": "d2d39b09-4acd-4f24-aa07-31e86f78f134", "address": "fa:16:3e:3b:95:21", "network": {"id": 
"183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2d39b09-4a", "ovs_interfaceid": "d2d39b09-4acd-4f24-aa07-31e86f78f134", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.561100] env[61978]: DEBUG oslo_concurrency.lockutils [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lock "refresh_cache-d3c82821-0617-4de6-8109-813a67910ed1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1257.571683] env[61978]: DEBUG oslo_vmware.api [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395815, 'name': PowerOffVM_Task, 'duration_secs': 0.211751} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.572351] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1257.572351] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1257.573018] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-291c5ec4-3457-45f3-a4c2-a467f1e6d758 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.681972] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1257.682378] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1257.682481] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Deleting the datastore file [datastore2] 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1257.682689] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-99043b64-286e-4f2f-b9b4-3997e9e1e2d8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.691856] env[61978]: DEBUG oslo_vmware.api [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1257.691856] env[61978]: value = "task-1395822" [ 1257.691856] env[61978]: _type = "Task" [ 1257.691856] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.703376] env[61978]: DEBUG oslo_vmware.api [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395822, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.712014] env[61978]: DEBUG nova.virt.hardware [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:05Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1257.712385] env[61978]: DEBUG nova.virt.hardware [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1257.712498] env[61978]: DEBUG nova.virt.hardware [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1257.712940] env[61978]: DEBUG nova.virt.hardware [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1257.712940] env[61978]: DEBUG nova.virt.hardware [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1257.712940] env[61978]: DEBUG nova.virt.hardware [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1257.713305] env[61978]: DEBUG nova.virt.hardware [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1257.713468] env[61978]: DEBUG nova.virt.hardware [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1257.713621] env[61978]: DEBUG nova.virt.hardware [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Got 1 possible topologies 
{{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1257.713940] env[61978]: DEBUG nova.virt.hardware [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1257.714046] env[61978]: DEBUG nova.virt.hardware [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1257.720836] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84f78f67-5283-4cc2-b944-1db56b26fa5f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.737394] env[61978]: DEBUG nova.compute.manager [req-da5d5bdb-0260-4c05-997d-9123f56f5c9f req-268b877b-c421-49a9-9edd-ac578156ada3 service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Received event network-vif-deleted-759bb051-5bd1-42fb-896d-ced73e9f67a1 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1257.749509] env[61978]: DEBUG oslo_vmware.api [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Task: {'id': task-1395818, 'name': PowerOnVM_Task, 'duration_secs': 0.496837} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.753147] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1257.753147] env[61978]: INFO nova.compute.manager [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Took 8.69 seconds to spawn the instance on the hypervisor. [ 1257.753147] env[61978]: DEBUG nova.compute.manager [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1257.753147] env[61978]: DEBUG oslo_vmware.api [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1257.753147] env[61978]: value = "task-1395823" [ 1257.753147] env[61978]: _type = "Task" [ 1257.753147] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.753147] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e93334f4-2d9b-4c49-b750-592d7c828a9b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.770864] env[61978]: DEBUG oslo_vmware.api [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395823, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.779283] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395820, 'name': CreateVM_Task, 'duration_secs': 0.301558} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.779283] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1257.779283] env[61978]: DEBUG oslo_concurrency.lockutils [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1257.779283] env[61978]: DEBUG oslo_concurrency.lockutils [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.779283] env[61978]: DEBUG oslo_concurrency.lockutils [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1257.779474] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddb8a3a9-689b-4822-aeef-4d656f09456a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.787023] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for the task: (returnval){ [ 1257.787023] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52569e49-9367-2b5d-d0fd-bdc506492507" [ 1257.787023] env[61978]: _type = "Task" [ 1257.787023] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.793542] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52569e49-9367-2b5d-d0fd-bdc506492507, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.877526] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Acquiring lock "845ec88d-5d2b-479c-a2d1-fa235b2b87b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1257.877761] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Lock "845ec88d-5d2b-479c-a2d1-fa235b2b87b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1258.038697] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "refresh_cache-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1258.065320] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "refresh_cache-4d357d46-8bbb-4228-a5a6-2ce67fe037d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1258.066106] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "refresh_cache-4d357d46-8bbb-4228-a5a6-2ce67fe037d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1258.066106] env[61978]: DEBUG nova.network.neutron [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1258.070532] env[61978]: DEBUG nova.compute.manager [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=61978) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:897}} [ 1258.200043] env[61978]: DEBUG oslo_vmware.api [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395822, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.349907} completed successfully. 
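The "Acquiring lock ... by ..._locked_do_build_and_run_instance" and refresh_cache lock entries above come from oslo.concurrency's lockutils, which Nova uses to serialize all work on a given instance (or cache key) behind a lock named after its UUID. A minimal sketch of that pattern, where `do_build` is a hypothetical callable standing in for the build work:

```python
from oslo_concurrency import lockutils

def locked_build(instance_uuid, do_build):
    """Minimal sketch of the per-instance serialization seen in the lock entries.

    The real code paths hold a lock named after the instance UUID for the
    whole build or teardown; `do_build` here is a hypothetical callable.
    """
    with lockutils.lock(instance_uuid):
        return do_build()
```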
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.200375] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1258.200609] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1258.200852] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1258.201088] env[61978]: INFO nova.compute.manager [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1258.201381] env[61978]: DEBUG oslo.service.loopingcall [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1258.201680] env[61978]: DEBUG nova.compute.manager [-] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1258.201812] env[61978]: DEBUG nova.network.neutron [-] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1258.246553] env[61978]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port 759bb051-5bd1-42fb-896d-ced73e9f67a1 could not be found.", "detail": ""}} {{(pid=61978) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1258.246553] env[61978]: DEBUG nova.network.neutron [-] Unable to show port 759bb051-5bd1-42fb-896d-ced73e9f67a1 as it no longer exists. {{(pid=61978) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1258.265846] env[61978]: DEBUG oslo_vmware.api [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395823, 'name': ReconfigVM_Task, 'duration_secs': 0.246009} completed successfully. 
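During network deallocation above, Neutron reports PortNotFound because the port was already deleted, and Nova logs "Unable to show port ... as it no longer exists" and moves on. A hedged sketch of that tolerant lookup, assuming `neutron` is an already-authenticated neutronclient.v2_0.client.Client:

```python
from neutronclient.common import exceptions as neutron_exc

def show_port_or_none(neutron, port_id):
    """Tolerant port lookup: a port that is already gone is treated as
    deleted rather than as an error, as in the log entry above."""
    try:
        return neutron.show_port(port_id)
    except neutron_exc.PortNotFoundClient:
        return None
```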
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.266238] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Updating instance '59f32dd0-1faa-4059-9ef3-b177e8f4fa4c' progress to 33 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1258.280822] env[61978]: INFO nova.compute.manager [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Took 25.38 seconds to build instance. [ 1258.285421] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a4a9f8-016e-41f0-9223-c66840131335 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.299356] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8989bcae-c1c0-4a77-a039-15163c595223 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.302786] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52569e49-9367-2b5d-d0fd-bdc506492507, 'name': SearchDatastore_Task, 'duration_secs': 0.015608} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.303734] env[61978]: DEBUG oslo_concurrency.lockutils [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1258.303996] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1258.304287] env[61978]: DEBUG oslo_concurrency.lockutils [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1258.304457] env[61978]: DEBUG oslo_concurrency.lockutils [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1258.304744] env[61978]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1258.305652] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c1c1159-aa22-4468-80af-81ff27e1ba4c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.334065] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66855a80-4a79-459f-85ee-9e5b3152cee4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.342090] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b9dac3-9e24-41a9-8e0a-8a2190596f0c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.347468] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1258.348044] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1258.348650] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fb835fd-a891-4a8c-bb7c-9db059c499c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.359942] env[61978]: DEBUG nova.compute.provider_tree [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1258.364513] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for the task: (returnval){ [ 1258.364513] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]520a2572-396f-137a-c582-42922df8fa6a" [ 1258.364513] env[61978]: _type = "Task" [ 1258.364513] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.372219] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520a2572-396f-137a-c582-42922df8fa6a, 'name': SearchDatastore_Task} progress is 0%. 
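The MakeDirectory call above is deliberately idempotent: the image-cache folder is created if missing, and an "already exists" outcome is treated as success. A local-filesystem analogue of that pattern (not the datastore API itself):

```python
import errno
import os

def ensure_dir(path):
    """Analogue of the '_create_folder_if_missing' step in the log:
    attempt the mkdir and treat 'already exists' as success."""
    try:
        os.makedirs(path)
    except OSError as exc:
        if exc.errno != errno.EEXIST:
            raise
```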
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.380725] env[61978]: DEBUG nova.compute.manager [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1258.543025] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7b93814c-1158-496a-9949-ec1215cd2e21 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "interface-758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9-45cdb631-5f61-4991-973e-3ba5b5ff0820" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.900s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1258.772756] env[61978]: DEBUG nova.virt.hardware [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1258.773108] env[61978]: DEBUG nova.virt.hardware [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1258.773268] env[61978]: DEBUG nova.virt.hardware [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1258.773373] env[61978]: DEBUG nova.virt.hardware [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1258.773523] env[61978]: DEBUG nova.virt.hardware [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1258.773673] env[61978]: DEBUG nova.virt.hardware [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1258.773882] env[61978]: DEBUG nova.virt.hardware [None 
req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1258.774861] env[61978]: DEBUG nova.virt.hardware [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1258.775139] env[61978]: DEBUG nova.virt.hardware [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1258.775344] env[61978]: DEBUG nova.virt.hardware [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1258.776634] env[61978]: DEBUG nova.virt.hardware [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1258.784449] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Reconfiguring VM instance instance-0000002a to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1258.785693] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d1dcaf86-35ef-4660-9fb5-9ffe1a79e723 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Lock "f9b57cf4-f2e4-4d2a-9bd4-74952d46876d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.898s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1258.785955] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c39b5b5-4e5b-4e7c-a319-87dcba9e35c1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.808057] env[61978]: DEBUG oslo_vmware.api [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1258.808057] env[61978]: value = "task-1395824" [ 1258.808057] env[61978]: _type = "Task" [ 1258.808057] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.817119] env[61978]: DEBUG oslo_vmware.api [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395824, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.865187] env[61978]: DEBUG nova.scheduler.client.report [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1258.880140] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520a2572-396f-137a-c582-42922df8fa6a, 'name': SearchDatastore_Task, 'duration_secs': 0.055885} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.883106] env[61978]: DEBUG nova.network.neutron [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Updating instance_info_cache with network_info: [{"id": "69d57c29-bde4-4e04-8f75-f8f4e410d10b", "address": "fa:16:3e:6b:47:c3", "network": {"id": "69a878ea-ec7d-4793-9c48-f0f5d454a6fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1711420523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d95ebcafdca43b8a1636e21c7258803", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69d57c29-bd", "ovs_interfaceid": "69d57c29-bde4-4e04-8f75-f8f4e410d10b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1258.893041] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e2362b5-1bc5-4fd5-9957-7c9088e0e78f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1258.904913] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for the task: (returnval){ [ 1258.904913] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52569969-08a5-6ec2-279a-9d07081e1d7a" [ 1258.904913] env[61978]: _type = "Task" [ 1258.904913] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.919203] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52569969-08a5-6ec2-279a-9d07081e1d7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.922150] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1259.201326] env[61978]: DEBUG oslo_concurrency.lockutils [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Acquiring lock "f9b57cf4-f2e4-4d2a-9bd4-74952d46876d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1259.201568] env[61978]: DEBUG oslo_concurrency.lockutils [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Lock "f9b57cf4-f2e4-4d2a-9bd4-74952d46876d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1259.201792] env[61978]: DEBUG oslo_concurrency.lockutils [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Acquiring lock "f9b57cf4-f2e4-4d2a-9bd4-74952d46876d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1259.202023] env[61978]: DEBUG oslo_concurrency.lockutils [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Lock "f9b57cf4-f2e4-4d2a-9bd4-74952d46876d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1259.202207] env[61978]: DEBUG oslo_concurrency.lockutils [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Lock 
"f9b57cf4-f2e4-4d2a-9bd4-74952d46876d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1259.204086] env[61978]: DEBUG oslo_concurrency.lockutils [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1259.204604] env[61978]: INFO nova.compute.manager [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Terminating instance [ 1259.206371] env[61978]: DEBUG nova.compute.manager [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1259.206607] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1259.207440] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b56d15-5ae7-498e-a776-4c6a63fdaa9c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.214767] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1259.215014] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e13055af-6128-4782-a617-2c9a7a993bcc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.220789] env[61978]: DEBUG oslo_vmware.api [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Waiting for the task: (returnval){ [ 1259.220789] env[61978]: value = "task-1395826" [ 1259.220789] env[61978]: _type = "Task" [ 1259.220789] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.228368] env[61978]: DEBUG oslo_vmware.api [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Task: {'id': task-1395826, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.269288] env[61978]: DEBUG nova.network.neutron [-] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1259.323512] env[61978]: DEBUG oslo_vmware.api [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395824, 'name': ReconfigVM_Task, 'duration_secs': 0.361885} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.323666] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Reconfigured VM instance instance-0000002a to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1259.324507] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bede92e-9026-41b9-b62b-a3a4e5734d32 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.349658] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c/59f32dd0-1faa-4059-9ef3-b177e8f4fa4c.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1259.349658] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6729f825-be2c-499b-ae7b-5dbae6bc8d80 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.370026] env[61978]: DEBUG oslo_vmware.api [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1259.370026] env[61978]: value = "task-1395827" [ 1259.370026] env[61978]: _type = "Task" [ 1259.370026] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.373759] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.839s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1259.376866] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.359s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1259.377167] env[61978]: DEBUG nova.objects.instance [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lazy-loading 'resources' on Instance uuid 17c56c1c-9992-4559-ad23-c68909ae6792 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1259.385192] env[61978]: DEBUG oslo_vmware.api [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395827, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.386901] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "refresh_cache-4d357d46-8bbb-4228-a5a6-2ce67fe037d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1259.419200] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52569969-08a5-6ec2-279a-9d07081e1d7a, 'name': SearchDatastore_Task, 'duration_secs': 0.011734} completed successfully. 
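The compute_resources lock and update_usage entries here tie back to the inventory reported a few entries earlier (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400). Placement derives usable capacity as (total - reserved) * allocation_ratio; a quick worked check with the logged figures:

```python
def usable_capacity(total, reserved, allocation_ratio):
    # Placement-style usable capacity: (total - reserved) * allocation_ratio
    return int((total - reserved) * allocation_ratio)

# Figures from the inventory entry logged above
print(usable_capacity(48, 0, 4.0))        # 192 schedulable VCPUs
print(usable_capacity(196590, 512, 1.0))  # 196078 MB of RAM
print(usable_capacity(400, 0, 1.0))       # 400 GB of disk
```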
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.419200] env[61978]: DEBUG oslo_concurrency.lockutils [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1259.419200] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] de8abe58-e0c2-4eaf-b3a6-7106e0861080/de8abe58-e0c2-4eaf-b3a6-7106e0861080.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1259.419200] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-95786601-0244-4e87-8409-eca9d67ae546 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.425965] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for the task: (returnval){ [ 1259.425965] env[61978]: value = "task-1395828" [ 1259.425965] env[61978]: _type = "Task" [ 1259.425965] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.435698] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395828, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.436368] env[61978]: INFO nova.scheduler.client.report [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Deleted allocations for instance 27713bbd-1234-44ae-8520-78d85baaae12 [ 1259.732751] env[61978]: DEBUG oslo_vmware.api [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Task: {'id': task-1395826, 'name': PowerOffVM_Task, 'duration_secs': 0.172069} completed successfully. 
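The SearchDatastore_Task and CopyVirtualDisk_Task pair above is the driver's image-cache path: confirm the cached VMDK exists under devstack-image-cache_base, then copy it into the new instance's folder instead of re-downloading the image. A hedged sketch of that cache-then-copy decision; all three callables are hypothetical stand-ins for the lookup, download, and copy steps named in the log.

```python
def provision_root_disk(cache_has_image, fetch_image, copy_disk,
                        image_id, instance_uuid):
    """Hedged sketch of the cache-then-copy flow in the entries above."""
    cache_path = "devstack-image-cache_base/%s/%s.vmdk" % (image_id, image_id)
    if not cache_has_image(cache_path):
        fetch_image(image_id, cache_path)   # only on a cache miss
    dest_path = "%s/%s.vmdk" % (instance_uuid, instance_uuid)
    copy_disk(cache_path, dest_path)        # CopyVirtualDisk_Task
    return dest_path
```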
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.733017] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1259.733195] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1259.733477] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-04e35239-1fbb-49e4-bb80-34dd2e433d6e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.761954] env[61978]: DEBUG nova.compute.manager [req-bd24590b-ba26-4bd4-a36f-ea64d00f064b req-b779b998-7208-4d19-82e9-de55836788bd service nova] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Received event network-vif-deleted-3c5e24a1-8ef7-45a5-a39a-4ce790adc338 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1259.771733] env[61978]: INFO nova.compute.manager [-] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Took 1.57 seconds to deallocate network for instance. [ 1259.843549] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1259.843932] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1259.844048] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Deleting the datastore file [datastore2] f9b57cf4-f2e4-4d2a-9bd4-74952d46876d {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1259.844636] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6e0be5f2-20d8-46ac-a67d-bd8b5fb842e8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.859992] env[61978]: DEBUG oslo_vmware.api [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Waiting for the task: (returnval){ [ 1259.859992] env[61978]: value = "task-1395830" [ 1259.859992] env[61978]: _type = "Task" [ 1259.859992] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.870522] env[61978]: DEBUG oslo_vmware.api [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Task: {'id': task-1395830, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.884714] env[61978]: DEBUG oslo_vmware.api [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395827, 'name': ReconfigVM_Task, 'duration_secs': 0.300023} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.885456] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c/59f32dd0-1faa-4059-9ef3-b177e8f4fa4c.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1259.885855] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Updating instance '59f32dd0-1faa-4059-9ef3-b177e8f4fa4c' progress to 50 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1259.935759] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395828, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48002} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.937312] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] de8abe58-e0c2-4eaf-b3a6-7106e0861080/de8abe58-e0c2-4eaf-b3a6-7106e0861080.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1259.937477] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1259.937948] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-01030386-b08a-4d40-a148-c240937e990a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.949044] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d34ea8a6-28c6-4593-8e07-0f28f5f3d229 tempest-ServerRescueTestJSONUnderV235-853842026 tempest-ServerRescueTestJSONUnderV235-853842026-project-member] Lock "27713bbd-1234-44ae-8520-78d85baaae12" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.071s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1259.951809] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for the task: (returnval){ [ 1259.951809] env[61978]: value = "task-1395831" [ 1259.951809] env[61978]: _type = "Task" [ 1259.951809] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.969537] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395831, 'name': ExtendVirtualDisk_Task} progress is 0%. 
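The "Extending root virtual disk to 1048576" entry above is the flavor's 1 GB root disk expressed in kilobytes, the unit the ExtendVirtualDisk call works in. A quick arithmetic check:

```python
root_gb = 1                                # root_gb of the flavors in this run
new_capacity_kb = root_gb * 1024 * 1024    # the extend call takes kilobytes
print(new_capacity_kb)                     # 1048576, matching the log entry
```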
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.146369] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144b8cc3-8f71-45a7-9145-9375ff4b969c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.154347] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96cabdc8-ebf8-4d9e-902c-e52250353dbb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.183714] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e028e8-244a-484d-9d56-bc859b89731d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.191015] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7aaf054-5b52-4e0c-9444-d18e54d1d08a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.204107] env[61978]: DEBUG nova.compute.provider_tree [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1260.279309] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1260.370234] env[61978]: DEBUG oslo_vmware.api [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Task: {'id': task-1395830, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168605} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.370516] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1260.370710] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1260.370895] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1260.371097] env[61978]: INFO nova.compute.manager [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1260.371349] env[61978]: DEBUG oslo.service.loopingcall [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
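The terminate path for f9b57cf4 repeats the sequence seen earlier for 758b16e4: power off, unregister, delete the instance's datastore directory, then deallocate networking with retries. A compressed outline of that ordering; every argument is a hypothetical callable standing in for the vm_util/ds_util/neutron steps named in the surrounding entries.

```python
def destroy_instance(power_off, unregister, delete_datastore_dir, deallocate_network):
    """Compressed outline of the destroy sequence the log walks through."""
    power_off()              # VirtualMachine.PowerOffVM_Task
    unregister()             # VirtualMachine.UnregisterVM
    delete_datastore_dir()   # FileManager.DeleteDatastoreFile_Task on the instance folder
    deallocate_network()     # deallocate_for_instance(), retried via a looping call
```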
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1260.371557] env[61978]: DEBUG nova.compute.manager [-] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1260.371654] env[61978]: DEBUG nova.network.neutron [-] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1260.396978] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1904aac9-b8e5-4790-aa5a-6b13b679cee4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.420871] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6d1288-cec4-419e-bbea-3e5ce8b42cf4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.424730] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ea6d728-e026-4d0a-bb25-700346bb40da {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.444116] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Updating instance '59f32dd0-1faa-4059-9ef3-b177e8f4fa4c' progress to 67 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1260.464188] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Updating instance '4d357d46-8bbb-4228-a5a6-2ce67fe037d7' progress to 0 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1260.476141] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395831, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061073} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.476446] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1260.477848] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc97b096-7ba3-4e1c-9363-3b14023015ec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.500656] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] de8abe58-e0c2-4eaf-b3a6-7106e0861080/de8abe58-e0c2-4eaf-b3a6-7106e0861080.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1260.501033] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba83d6dd-a487-462f-80f7-8a1eb3a04da5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.521413] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for the task: (returnval){ [ 1260.521413] env[61978]: value = "task-1395832" [ 1260.521413] env[61978]: _type = "Task" [ 1260.521413] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.529536] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395832, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.708210] env[61978]: DEBUG nova.scheduler.client.report [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1260.973695] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1260.973967] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a6a0b9e7-fd76-4c81-bb19-aacfb53bcbd5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.982837] env[61978]: DEBUG oslo_vmware.api [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1260.982837] env[61978]: value = "task-1395833" [ 1260.982837] env[61978]: _type = "Task" [ 1260.982837] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.991250] env[61978]: DEBUG oslo_vmware.api [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395833, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.008015] env[61978]: DEBUG nova.network.neutron [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Port 7417d7e9-723d-408d-bfa4-e583af757e79 binding to destination host cpu-1 is already ACTIVE {{(pid=61978) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1261.031274] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395832, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.215554] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.837s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1261.216665] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.386s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1261.216824] env[61978]: DEBUG nova.objects.instance [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lazy-loading 'resources' on Instance uuid b76dd94e-c14b-48d4-bb7f-020313412ca2 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1261.291125] env[61978]: DEBUG nova.network.neutron [-] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1261.345191] env[61978]: INFO nova.scheduler.client.report [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Deleted allocations for instance 17c56c1c-9992-4559-ad23-c68909ae6792 [ 1261.491612] env[61978]: DEBUG oslo_vmware.api [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395833, 'name': PowerOffVM_Task, 'duration_secs': 0.318197} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.494214] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1261.494214] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Updating instance '4d357d46-8bbb-4228-a5a6-2ce67fe037d7' progress to 17 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1261.536706] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395832, 'name': ReconfigVM_Task, 'duration_secs': 0.730575} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.537010] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Reconfigured VM instance instance-00000060 to attach disk [datastore2] de8abe58-e0c2-4eaf-b3a6-7106e0861080/de8abe58-e0c2-4eaf-b3a6-7106e0861080.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1261.537988] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0b70e266-7497-45e1-afe9-c4f5e7637d40 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.548826] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for the task: (returnval){ [ 1261.548826] env[61978]: value = "task-1395835" [ 1261.548826] env[61978]: _type = "Task" [ 1261.548826] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.562801] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395835, 'name': Rename_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.795530] env[61978]: INFO nova.compute.manager [-] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Took 1.42 seconds to deallocate network for instance. 
[ 1261.854994] env[61978]: DEBUG nova.compute.manager [req-ebebe257-9f44-4029-b965-f79524553b1f req-8287260b-dd85-47f1-83ea-7b25734823d7 service nova] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Received event network-vif-deleted-41133564-b2c5-468a-aafc-5e11f8388a94 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1261.855603] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1a669ee2-4080-4919-b989-505c8571084d tempest-ServersNegativeTestJSON-1006045042 tempest-ServersNegativeTestJSON-1006045042-project-member] Lock "17c56c1c-9992-4559-ad23-c68909ae6792" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.042s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1261.974736] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1484518-5099-4d3a-96e3-b94bb395a9f9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.982292] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df17cca-79a3-43ba-adaf-29d3491636f2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.014185] env[61978]: DEBUG nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:05Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1262.014448] env[61978]: DEBUG nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1262.014687] env[61978]: DEBUG nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1262.014919] env[61978]: DEBUG nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1262.015177] env[61978]: DEBUG nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1262.015363] env[61978]: DEBUG 
nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1262.015579] env[61978]: DEBUG nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1262.015745] env[61978]: DEBUG nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1262.015920] env[61978]: DEBUG nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1262.016108] env[61978]: DEBUG nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1262.016290] env[61978]: DEBUG nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1262.028624] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-562e4b71-d846-43ac-ac79-4b92a1ee448d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.038982] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff4bea3-ebbe-4f7d-b04e-10bb32826834 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.051017] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1262.051017] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1262.051017] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.061690] env[61978]: DEBUG oslo_vmware.api [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1262.061690] env[61978]: value = "task-1395836" [ 1262.061690] env[61978]: _type = "Task" [ 1262.061690] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.061690] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-165dd435-b89b-45e4-b72a-317ec3afb79b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.070219] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395835, 'name': Rename_Task, 'duration_secs': 0.133535} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.070915] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1262.071527] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aa655cbc-af0f-4393-9b50-9998044aac80 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.088597] env[61978]: DEBUG nova.compute.provider_tree [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1262.088597] env[61978]: DEBUG oslo_vmware.api [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395836, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.092566] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for the task: (returnval){ [ 1262.092566] env[61978]: value = "task-1395837" [ 1262.092566] env[61978]: _type = "Task" [ 1262.092566] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.102602] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395837, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.307601] env[61978]: DEBUG oslo_concurrency.lockutils [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1262.573963] env[61978]: DEBUG oslo_vmware.api [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395836, 'name': ReconfigVM_Task, 'duration_secs': 0.261767} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.574271] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Updating instance '4d357d46-8bbb-4228-a5a6-2ce67fe037d7' progress to 33 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1262.588504] env[61978]: DEBUG nova.scheduler.client.report [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1262.601049] env[61978]: DEBUG oslo_vmware.api [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395837, 'name': PowerOnVM_Task, 'duration_secs': 0.476635} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.601322] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1262.601529] env[61978]: INFO nova.compute.manager [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Took 5.48 seconds to spawn the instance on the hypervisor. 
[ 1262.601923] env[61978]: DEBUG nova.compute.manager [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1262.604262] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2fb3f74-825e-4026-bc8f-accc509680e6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.087017] env[61978]: DEBUG nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1263.087017] env[61978]: DEBUG nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1263.087017] env[61978]: DEBUG nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1263.087017] env[61978]: DEBUG nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1263.087017] env[61978]: DEBUG nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1263.087017] env[61978]: DEBUG nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1263.087017] env[61978]: DEBUG nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
1263.087017] env[61978]: DEBUG nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1263.087017] env[61978]: DEBUG nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1263.087017] env[61978]: DEBUG nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1263.087017] env[61978]: DEBUG nova.virt.hardware [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1263.092659] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Reconfiguring VM instance instance-0000005d to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1263.101349] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1263.101349] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1263.101349] env[61978]: DEBUG nova.network.neutron [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1263.101349] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.882s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1263.101349] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7c0ea6b-9223-420e-94b9-b8f543efc36d {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.115226] env[61978]: DEBUG oslo_concurrency.lockutils [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.034s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1263.115567] env[61978]: DEBUG nova.objects.instance [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lazy-loading 'resources' on Instance uuid e9b70b36-d0d8-430e-a5e7-588d3c75d7ff {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1263.126096] env[61978]: INFO nova.compute.manager [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Took 14.22 seconds to build instance. [ 1263.129437] env[61978]: DEBUG oslo_vmware.api [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1263.129437] env[61978]: value = "task-1395838" [ 1263.129437] env[61978]: _type = "Task" [ 1263.129437] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.144507] env[61978]: DEBUG oslo_vmware.api [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395838, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.193432] env[61978]: INFO nova.scheduler.client.report [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Deleted allocations for instance b76dd94e-c14b-48d4-bb7f-020313412ca2 [ 1263.628233] env[61978]: DEBUG oslo_concurrency.lockutils [None req-df79a75e-1820-4fc2-aa98-a8fce05b6e7b tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Lock "de8abe58-e0c2-4eaf-b3a6-7106e0861080" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.739s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1263.639908] env[61978]: DEBUG oslo_vmware.api [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395838, 'name': ReconfigVM_Task, 'duration_secs': 0.219023} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.640291] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Reconfigured VM instance instance-0000005d to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1263.641147] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739f4851-5e64-4c2e-9506-c55f9020853b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.665027] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] 4d357d46-8bbb-4228-a5a6-2ce67fe037d7/4d357d46-8bbb-4228-a5a6-2ce67fe037d7.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1263.667830] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b6a3d6b-f82d-42f0-9b06-e60c409d32b5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.689547] env[61978]: DEBUG oslo_vmware.api [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1263.689547] env[61978]: value = "task-1395840" [ 1263.689547] env[61978]: _type = "Task" [ 1263.689547] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.693495] env[61978]: INFO nova.compute.manager [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Rebuilding instance [ 1263.704748] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a32593b7-d75f-4a6f-a69c-f5de72d6405e tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "b76dd94e-c14b-48d4-bb7f-020313412ca2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.343s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1263.706123] env[61978]: DEBUG oslo_vmware.api [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395840, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.755530] env[61978]: DEBUG nova.compute.manager [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1263.756553] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceca02bf-cec5-4413-b7ad-9b0962477cee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.915848] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86fbc411-089b-4237-9fa6-aeab4febfbb2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.925921] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1438a699-ce85-41a3-8e47-294d26926b0e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.960079] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3774fad-0788-4bf6-a969-ab7cef134afe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.970301] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85e317e-e320-48f9-b937-4266e2328492 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.983337] env[61978]: DEBUG nova.compute.provider_tree [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1264.050919] env[61978]: DEBUG nova.network.neutron [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Updating instance_info_cache with network_info: [{"id": "7417d7e9-723d-408d-bfa4-e583af757e79", "address": "fa:16:3e:e6:e3:c6", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7417d7e9-72", "ovs_interfaceid": 
"7417d7e9-723d-408d-bfa4-e583af757e79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1264.201067] env[61978]: DEBUG oslo_vmware.api [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395840, 'name': ReconfigVM_Task, 'duration_secs': 0.28749} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.201067] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Reconfigured VM instance instance-0000005d to attach disk [datastore2] 4d357d46-8bbb-4228-a5a6-2ce67fe037d7/4d357d46-8bbb-4228-a5a6-2ce67fe037d7.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1264.202402] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Updating instance '4d357d46-8bbb-4228-a5a6-2ce67fe037d7' progress to 50 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1264.269179] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1264.269493] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ece11e4-cfa7-4759-b257-6357bb5a32a5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.277434] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for the task: (returnval){ [ 1264.277434] env[61978]: value = "task-1395841" [ 1264.277434] env[61978]: _type = "Task" [ 1264.277434] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.285661] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395841, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.487236] env[61978]: DEBUG nova.scheduler.client.report [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1264.553494] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1264.709974] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6701bab-6653-46f2-9dae-b7a9f7f23bde {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.730196] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dcf55b4-0e83-4852-9a35-a357614591b0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.749805] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Updating instance '4d357d46-8bbb-4228-a5a6-2ce67fe037d7' progress to 67 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1264.787458] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395841, 'name': PowerOffVM_Task, 'duration_secs': 0.204528} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.787748] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1264.787976] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1264.788779] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-796000fc-3752-46a6-b1f7-3eb05261a586 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.795197] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1264.795431] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-67769669-2ca1-412c-858c-85ea047423a3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.819960] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1264.820244] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1264.820445] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Deleting the datastore file [datastore2] de8abe58-e0c2-4eaf-b3a6-7106e0861080 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1264.821015] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-145f1e4a-73ff-46db-a449-52131e6cee74 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.827772] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for the task: (returnval){ [ 1264.827772] env[61978]: value = "task-1395843" [ 1264.827772] env[61978]: _type = "Task" [ 1264.827772] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.835230] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395843, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.992484] env[61978]: DEBUG oslo_concurrency.lockutils [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.877s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1264.994783] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.482s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1264.995088] env[61978]: DEBUG nova.objects.instance [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lazy-loading 'resources' on Instance uuid 03b08977-4b20-4bac-b48b-06ba5df4e579 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1265.048523] env[61978]: INFO nova.scheduler.client.report [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Deleted allocations for instance e9b70b36-d0d8-430e-a5e7-588d3c75d7ff [ 1265.075113] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e3f154-1973-4e64-b96b-41b355b0ea70 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.093416] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-949396c5-a028-42cf-a80e-792e3438e3ed {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.099730] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Updating instance '59f32dd0-1faa-4059-9ef3-b177e8f4fa4c' progress to 83 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1265.338794] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395843, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.212061} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.339151] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1265.339383] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1265.339580] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1265.345790] env[61978]: DEBUG nova.network.neutron [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Port 69d57c29-bde4-4e04-8f75-f8f4e410d10b binding to destination host cpu-1 is already ACTIVE {{(pid=61978) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1265.555496] env[61978]: DEBUG oslo_concurrency.lockutils [None req-77fbcbf9-8539-4ef7-b5da-28134dddea0f tempest-ListServersNegativeTestJSON-431519154 tempest-ListServersNegativeTestJSON-431519154-project-member] Lock "e9b70b36-d0d8-430e-a5e7-588d3c75d7ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.938s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1265.606019] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1265.606019] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b42d28d6-12c2-4cf5-867a-cdf4e4048a7b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.613075] env[61978]: DEBUG oslo_vmware.api [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1265.613075] env[61978]: value = "task-1395844" [ 1265.613075] env[61978]: _type = "Task" [ 1265.613075] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.622926] env[61978]: DEBUG oslo_vmware.api [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395844, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.678204] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3f2d4a-452c-4292-8bec-71169499765b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.685274] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c407a31-05a6-4131-ba60-462c0aa255c5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.714558] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6017388-5062-412b-b47f-9f8c27db0c19 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.722787] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-606a86a2-dd1f-48ef-ae2e-cebda09a2fe0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.736384] env[61978]: DEBUG nova.compute.provider_tree [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1266.122511] env[61978]: DEBUG oslo_vmware.api [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395844, 'name': PowerOnVM_Task, 'duration_secs': 0.387588} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.122803] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1266.123015] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a62848-5708-4ff8-b429-6afd1e1d3fb7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Updating instance '59f32dd0-1faa-4059-9ef3-b177e8f4fa4c' progress to 100 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1266.240090] env[61978]: DEBUG nova.scheduler.client.report [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1266.370762] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "4d357d46-8bbb-4228-a5a6-2ce67fe037d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1266.371099] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "4d357d46-8bbb-4228-a5a6-2ce67fe037d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1266.371316] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "4d357d46-8bbb-4228-a5a6-2ce67fe037d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1266.406982] env[61978]: DEBUG nova.virt.hardware [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1266.406982] env[61978]: DEBUG nova.virt.hardware [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1266.406982] env[61978]: DEBUG nova.virt.hardware [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1266.406982] env[61978]: DEBUG nova.virt.hardware [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1266.406982] env[61978]: DEBUG nova.virt.hardware [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1266.406982] env[61978]: DEBUG nova.virt.hardware [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1266.406982] env[61978]: DEBUG nova.virt.hardware [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1266.406982] env[61978]: DEBUG nova.virt.hardware [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1266.407424] env[61978]: DEBUG nova.virt.hardware [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1266.407424] env[61978]: DEBUG nova.virt.hardware [None req-644001a0-a365-42e7-bc49-60bb22f60585 
tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1266.407424] env[61978]: DEBUG nova.virt.hardware [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1266.408644] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df6e125-9f6b-47d8-9642-80bd55dcf6ae {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.416463] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2015152-d6ab-40d5-836e-d03a68767efd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.429868] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Instance VIF info [] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1266.437015] env[61978]: DEBUG oslo.service.loopingcall [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1266.437222] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1266.437434] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-60d446c7-62f7-4f16-bf6a-e1f655f703a4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.454157] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1266.454157] env[61978]: value = "task-1395845" [ 1266.454157] env[61978]: _type = "Task" [ 1266.454157] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.461646] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395845, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.745017] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.750s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1266.747270] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.825s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1266.748855] env[61978]: INFO nova.compute.claims [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1266.832853] env[61978]: INFO nova.scheduler.client.report [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Deleted allocations for instance 03b08977-4b20-4bac-b48b-06ba5df4e579 [ 1266.966968] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395845, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.341552] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e5766ed7-4c17-4230-bfc8-d894c75dc55f tempest-ImagesOneServerNegativeTestJSON-569281538 tempest-ImagesOneServerNegativeTestJSON-569281538-project-member] Lock "03b08977-4b20-4bac-b48b-06ba5df4e579" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.226s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1267.449480] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "refresh_cache-4d357d46-8bbb-4228-a5a6-2ce67fe037d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1267.449732] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "refresh_cache-4d357d46-8bbb-4228-a5a6-2ce67fe037d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1267.449879] env[61978]: DEBUG nova.network.neutron [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1267.468410] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395845, 'name': 
CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.729654] env[61978]: DEBUG oslo_concurrency.lockutils [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1267.729946] env[61978]: DEBUG oslo_concurrency.lockutils [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1267.940742] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521ad0d2-5d69-45de-a69c-98df046ce89a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.948514] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c900bea6-ebc6-4eeb-b4a4-588e9e7ab91c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.995793] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74aa9c9e-e2b1-4125-a618-d6c18b361586 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.003755] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395845, 'name': CreateVM_Task, 'duration_secs': 1.362149} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.005667] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1268.006191] env[61978]: DEBUG oslo_concurrency.lockutils [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1268.006371] env[61978]: DEBUG oslo_concurrency.lockutils [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1268.006701] env[61978]: DEBUG oslo_concurrency.lockutils [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1268.008117] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475e5718-729e-4b2a-acfc-29526a5d7f1c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.011872] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2cdb6bd-94e5-44c5-829a-a2eec13732f0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.017438] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for the task: (returnval){ [ 1268.017438] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]528e32d0-f9a8-3cf3-b009-e4ff99561a9b" [ 1268.017438] env[61978]: _type = "Task" [ 1268.017438] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.026130] env[61978]: DEBUG nova.compute.provider_tree [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1268.036691] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528e32d0-f9a8-3cf3-b009-e4ff99561a9b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.232852] env[61978]: DEBUG nova.compute.manager [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1268.249128] env[61978]: DEBUG nova.network.neutron [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Updating instance_info_cache with network_info: [{"id": "69d57c29-bde4-4e04-8f75-f8f4e410d10b", "address": "fa:16:3e:6b:47:c3", "network": {"id": "69a878ea-ec7d-4793-9c48-f0f5d454a6fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1711420523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d95ebcafdca43b8a1636e21c7258803", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69d57c29-bd", "ovs_interfaceid": "69d57c29-bde4-4e04-8f75-f8f4e410d10b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1268.530766] env[61978]: DEBUG nova.scheduler.client.report [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1268.540169] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528e32d0-f9a8-3cf3-b009-e4ff99561a9b, 'name': SearchDatastore_Task, 'duration_secs': 0.032519} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.540482] env[61978]: DEBUG oslo_concurrency.lockutils [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1268.540719] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1268.540966] env[61978]: DEBUG oslo_concurrency.lockutils [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1268.541456] env[61978]: DEBUG oslo_concurrency.lockutils [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1268.541456] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1268.541603] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d40fe50-8e37-4895-84d4-87819fadf7e8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.551190] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1268.551386] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1268.552110] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fa9d167-5ac1-4927-9556-bd97779254c0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.557772] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for the task: (returnval){ [ 1268.557772] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]523a640a-c456-3423-301d-0ba11e6b7326" [ 1268.557772] env[61978]: _type = "Task" [ 1268.557772] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.565163] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523a640a-c456-3423-301d-0ba11e6b7326, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.755020] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "refresh_cache-4d357d46-8bbb-4228-a5a6-2ce67fe037d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1268.766716] env[61978]: DEBUG oslo_concurrency.lockutils [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1268.813294] env[61978]: DEBUG oslo_concurrency.lockutils [None req-292d58cb-bcae-4c7d-bfeb-875b69951466 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1268.813572] env[61978]: DEBUG oslo_concurrency.lockutils [None req-292d58cb-bcae-4c7d-bfeb-875b69951466 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1268.813772] env[61978]: DEBUG nova.compute.manager [None req-292d58cb-bcae-4c7d-bfeb-875b69951466 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Going to confirm migration 6 {{(pid=61978) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1269.035420] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 
tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.288s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1269.035973] env[61978]: DEBUG nova.compute.manager [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1269.038632] env[61978]: DEBUG oslo_concurrency.lockutils [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 9.835s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1269.069086] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523a640a-c456-3423-301d-0ba11e6b7326, 'name': SearchDatastore_Task, 'duration_secs': 0.013313} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.069890] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd8154fa-6dbc-4723-9d20-b8342252c9e4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.075804] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for the task: (returnval){ [ 1269.075804] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]522c7e12-0204-953e-a0a8-d6345cb4828a" [ 1269.075804] env[61978]: _type = "Task" [ 1269.075804] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.085172] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522c7e12-0204-953e-a0a8-d6345cb4828a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.294267] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20cfca5a-925d-430b-a4a2-07ca9e864ffb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.319492] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a7bd45-bfd1-4731-b707-cda74551b295 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.328537] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Updating instance '4d357d46-8bbb-4228-a5a6-2ce67fe037d7' progress to 83 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1269.400339] env[61978]: DEBUG oslo_concurrency.lockutils [None req-292d58cb-bcae-4c7d-bfeb-875b69951466 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1269.401155] env[61978]: DEBUG oslo_concurrency.lockutils [None req-292d58cb-bcae-4c7d-bfeb-875b69951466 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1269.401155] env[61978]: DEBUG nova.network.neutron [None req-292d58cb-bcae-4c7d-bfeb-875b69951466 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1269.401155] env[61978]: DEBUG nova.objects.instance [None req-292d58cb-bcae-4c7d-bfeb-875b69951466 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lazy-loading 'info_cache' on Instance uuid 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1269.542814] env[61978]: DEBUG nova.compute.utils [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1269.544740] env[61978]: DEBUG nova.objects.instance [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lazy-loading 'migration_context' on Instance uuid d3c82821-0617-4de6-8109-813a67910ed1 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1269.545877] env[61978]: DEBUG nova.compute.manager [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1269.546064] env[61978]: DEBUG nova.network.neutron [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1269.590230] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522c7e12-0204-953e-a0a8-d6345cb4828a, 'name': SearchDatastore_Task, 'duration_secs': 0.021625} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.590626] env[61978]: DEBUG oslo_concurrency.lockutils [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1269.590968] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] de8abe58-e0c2-4eaf-b3a6-7106e0861080/de8abe58-e0c2-4eaf-b3a6-7106e0861080.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1269.591294] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3979b47-29f4-453b-ae0d-f0f51f00cd3f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.601473] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for the task: (returnval){ [ 1269.601473] env[61978]: value = "task-1395846" [ 1269.601473] env[61978]: _type = "Task" [ 1269.601473] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.612736] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395846, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.614201] env[61978]: DEBUG nova.policy [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '00ddf3c7248b4ba1a832731f5d307766', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e163315a8674fb8835344dbdc454e36', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1269.837373] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1269.837704] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-54b6b8c2-24e2-401d-869b-6a0f6a88eedc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.845306] env[61978]: DEBUG oslo_vmware.api [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1269.845306] env[61978]: value = "task-1395847" [ 1269.845306] env[61978]: _type = "Task" [ 1269.845306] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.857285] env[61978]: DEBUG oslo_vmware.api [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395847, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.915107] env[61978]: DEBUG nova.network.neutron [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Successfully created port: 55da250a-bf6c-4f74-98c0-a25c3605a4df {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1270.047311] env[61978]: DEBUG nova.compute.manager [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1270.117200] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395846, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.269231] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb6e7d6-5a81-4665-a57c-61cae6aef88b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.277584] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e44ad2-e256-4883-876c-8c553247b2f2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.311295] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75a4b1a-108f-412f-a97a-9eabfd466636 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.320647] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad9a022-abb4-473e-8218-0c10034ba515 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.336052] env[61978]: DEBUG nova.compute.provider_tree [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1270.355925] env[61978]: DEBUG oslo_vmware.api [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395847, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.615972] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395846, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.937014} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.615972] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] de8abe58-e0c2-4eaf-b3a6-7106e0861080/de8abe58-e0c2-4eaf-b3a6-7106e0861080.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1270.615972] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1270.615972] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-be9b5975-fee4-4b4c-951e-91ffd59c77a7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.623370] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for the task: (returnval){ [ 1270.623370] env[61978]: value = "task-1395848" [ 1270.623370] env[61978]: _type = "Task" [ 1270.623370] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.633342] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395848, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.669329] env[61978]: DEBUG nova.network.neutron [None req-292d58cb-bcae-4c7d-bfeb-875b69951466 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Updating instance_info_cache with network_info: [{"id": "7417d7e9-723d-408d-bfa4-e583af757e79", "address": "fa:16:3e:e6:e3:c6", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7417d7e9-72", "ovs_interfaceid": "7417d7e9-723d-408d-bfa4-e583af757e79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1270.839933] env[61978]: DEBUG nova.scheduler.client.report [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1270.855613] env[61978]: DEBUG oslo_vmware.api [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395847, 'name': PowerOnVM_Task, 'duration_secs': 0.79409} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.856398] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1270.856597] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-cbbfa673-66e5-49c9-b846-3c77747756c6 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Updating instance '4d357d46-8bbb-4228-a5a6-2ce67fe037d7' progress to 100 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1271.060152] env[61978]: DEBUG nova.compute.manager [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1271.087290] env[61978]: DEBUG nova.virt.hardware [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1271.087948] env[61978]: DEBUG nova.virt.hardware [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1271.087948] env[61978]: DEBUG nova.virt.hardware [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1271.088083] env[61978]: DEBUG nova.virt.hardware [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1271.088221] env[61978]: DEBUG nova.virt.hardware [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 
tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1271.088398] env[61978]: DEBUG nova.virt.hardware [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1271.088656] env[61978]: DEBUG nova.virt.hardware [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1271.088858] env[61978]: DEBUG nova.virt.hardware [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1271.089127] env[61978]: DEBUG nova.virt.hardware [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1271.089348] env[61978]: DEBUG nova.virt.hardware [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1271.089579] env[61978]: DEBUG nova.virt.hardware [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1271.090540] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03eace85-72ac-4147-820a-44e3dd532ce5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.099233] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e3791f-cc97-43cf-90c8-ed99f2686257 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.131913] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395848, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066914} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.132130] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1271.133017] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96925a15-5b7f-4500-a01b-6380eb02d8b0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.152187] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] de8abe58-e0c2-4eaf-b3a6-7106e0861080/de8abe58-e0c2-4eaf-b3a6-7106e0861080.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1271.152484] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fae5656d-dfe9-43c9-af63-50b8b2b60ffe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.171639] env[61978]: DEBUG oslo_concurrency.lockutils [None req-292d58cb-bcae-4c7d-bfeb-875b69951466 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "refresh_cache-59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1271.171902] env[61978]: DEBUG nova.objects.instance [None req-292d58cb-bcae-4c7d-bfeb-875b69951466 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lazy-loading 'migration_context' on Instance uuid 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1271.173064] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for the task: (returnval){ [ 1271.173064] env[61978]: value = "task-1395849" [ 1271.173064] env[61978]: _type = "Task" [ 1271.173064] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.183296] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395849, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.678141] env[61978]: DEBUG nova.objects.base [None req-292d58cb-bcae-4c7d-bfeb-875b69951466 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Object Instance<59f32dd0-1faa-4059-9ef3-b177e8f4fa4c> lazy-loaded attributes: info_cache,migration_context {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1271.680016] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-617075c1-1a4c-456c-88cc-0e35ab0d9ffe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.683787] env[61978]: DEBUG nova.compute.manager [req-0cc19cd2-f8d9-42ad-a87d-ec84c939d414 req-c0a36296-7229-4033-8ae7-2c2e8648d90e service nova] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Received event network-vif-plugged-55da250a-bf6c-4f74-98c0-a25c3605a4df {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1271.684016] env[61978]: DEBUG oslo_concurrency.lockutils [req-0cc19cd2-f8d9-42ad-a87d-ec84c939d414 req-c0a36296-7229-4033-8ae7-2c2e8648d90e service nova] Acquiring lock "845ec88d-5d2b-479c-a2d1-fa235b2b87b3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1271.684326] env[61978]: DEBUG oslo_concurrency.lockutils [req-0cc19cd2-f8d9-42ad-a87d-ec84c939d414 req-c0a36296-7229-4033-8ae7-2c2e8648d90e service nova] Lock "845ec88d-5d2b-479c-a2d1-fa235b2b87b3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1271.684527] env[61978]: DEBUG oslo_concurrency.lockutils [req-0cc19cd2-f8d9-42ad-a87d-ec84c939d414 req-c0a36296-7229-4033-8ae7-2c2e8648d90e service nova] Lock "845ec88d-5d2b-479c-a2d1-fa235b2b87b3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.684749] env[61978]: DEBUG nova.compute.manager [req-0cc19cd2-f8d9-42ad-a87d-ec84c939d414 req-c0a36296-7229-4033-8ae7-2c2e8648d90e service nova] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] No waiting events found dispatching network-vif-plugged-55da250a-bf6c-4f74-98c0-a25c3605a4df {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1271.684925] env[61978]: WARNING nova.compute.manager [req-0cc19cd2-f8d9-42ad-a87d-ec84c939d414 req-c0a36296-7229-4033-8ae7-2c2e8648d90e service nova] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Received unexpected event network-vif-plugged-55da250a-bf6c-4f74-98c0-a25c3605a4df for instance with vm_state building and task_state spawning. [ 1271.688461] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395849, 'name': ReconfigVM_Task, 'duration_secs': 0.269779} completed successfully. 
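The compute.manager records above show the external-event path: a network-vif-plugged notification arrives, the per-instance "-events" lock is taken, no registered waiter is found, and the event is logged as unexpected because the instance is still building/spawning. A toy version of that waiter registry, using threading primitives purely for illustration (the waiter type and registry layout are assumptions, not Nova's implementation):

import threading
from collections import defaultdict

class InstanceEventWaiters:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = defaultdict(dict)   # instance_uuid -> {event_name: Event}

    def prepare(self, instance_uuid, event_name):
        # Called by the spawning side before it starts waiting.
        with self._lock:
            ev = threading.Event()
            self._waiters[instance_uuid][event_name] = ev
            return ev

    def pop(self, instance_uuid, event_name):
        # Called by the notification side; returns None if nobody is waiting.
        with self._lock:
            return self._waiters.get(instance_uuid, {}).pop(event_name, None)

def handle_external_event(waiters, instance_uuid, event_name):
    waiter = waiters.pop(instance_uuid, event_name)
    if waiter is None:
        # Mirrors the WARNING above: the event arrived before anyone
        # registered interest in it, so it is logged and dropped.
        print("Received unexpected event %s for instance %s"
              % (event_name, instance_uuid))
    else:
        waiter.set()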
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.689107] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Reconfigured VM instance instance-00000060 to attach disk [datastore2] de8abe58-e0c2-4eaf-b3a6-7106e0861080/de8abe58-e0c2-4eaf-b3a6-7106e0861080.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1271.689677] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c9bde666-02fe-4665-b08f-55bb177dffbd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.704999] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01cb70a7-dcd3-4669-9c44-0d6e18589ddc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.709630] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for the task: (returnval){ [ 1271.709630] env[61978]: value = "task-1395850" [ 1271.709630] env[61978]: _type = "Task" [ 1271.709630] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.710887] env[61978]: DEBUG oslo_vmware.api [None req-292d58cb-bcae-4c7d-bfeb-875b69951466 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1271.710887] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52329e9d-700a-0a10-daab-787bf85cffa6" [ 1271.710887] env[61978]: _type = "Task" [ 1271.710887] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.720774] env[61978]: DEBUG oslo_vmware.api [None req-292d58cb-bcae-4c7d-bfeb-875b69951466 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52329e9d-700a-0a10-daab-787bf85cffa6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.723590] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395850, 'name': Rename_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.739942] env[61978]: DEBUG nova.network.neutron [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Successfully updated port: 55da250a-bf6c-4f74-98c0-a25c3605a4df {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1271.852280] env[61978]: DEBUG oslo_concurrency.lockutils [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.813s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.858230] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.579s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1271.858474] env[61978]: DEBUG nova.objects.instance [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lazy-loading 'resources' on Instance uuid 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1272.223747] env[61978]: DEBUG oslo_vmware.api [None req-292d58cb-bcae-4c7d-bfeb-875b69951466 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52329e9d-700a-0a10-daab-787bf85cffa6, 'name': SearchDatastore_Task, 'duration_secs': 0.019151} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.226805] env[61978]: DEBUG oslo_concurrency.lockutils [None req-292d58cb-bcae-4c7d-bfeb-875b69951466 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.227189] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395850, 'name': Rename_Task, 'duration_secs': 0.125735} completed successfully. 
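The lockutils lines here and throughout the trace always come in the same trio: "Acquiring lock", "acquired ... waited N s", "released ... held N s" (for example, compute_resources held 2.813s and waited 11.579s just above). A small context-manager sketch that reproduces that accounting, assuming simple in-process threading locks rather than oslo.concurrency:

import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_lock = threading.Lock()

@contextmanager
def timed_lock(name, by):
    with _registry_lock:
        lock = _locks.setdefault(name, threading.Lock())
    print('Acquiring lock "%s" by "%s"' % (name, by))
    waited_from = time.monotonic()
    lock.acquire()
    acquired_at = time.monotonic()
    print('Lock "%s" acquired by "%s" :: waited %.3fs'
          % (name, by, acquired_at - waited_from))
    try:
        yield
    finally:
        lock.release()
        print('Lock "%s" "released" by "%s" :: held %.3fs'
              % (name, by, time.monotonic() - acquired_at))

# Usage mirroring the resource-tracker lines above:
# with timed_lock("compute_resources", "ResourceTracker.update_usage"):
#     ...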
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.227464] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1272.227719] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-192c7ff7-e04a-4008-8565-23f94ec27730 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.236227] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for the task: (returnval){ [ 1272.236227] env[61978]: value = "task-1395851" [ 1272.236227] env[61978]: _type = "Task" [ 1272.236227] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.246600] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Acquiring lock "refresh_cache-845ec88d-5d2b-479c-a2d1-fa235b2b87b3" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1272.246600] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Acquired lock "refresh_cache-845ec88d-5d2b-479c-a2d1-fa235b2b87b3" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1272.246600] env[61978]: DEBUG nova.network.neutron [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1272.246600] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395851, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.561757] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89df7386-089f-48a9-9642-0b6544fde795 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.571702] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69db816e-1ba6-4e70-aa60-a9945fe451c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.603777] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b45cc9f3-cb82-40fa-82f3-d5b7fb2fdb78 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.611067] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4fcb5d-d1ca-425d-8ad4-6ee75e9ede31 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.625883] env[61978]: DEBUG nova.compute.provider_tree [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1272.748779] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395851, 'name': PowerOnVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.780577] env[61978]: DEBUG nova.network.neutron [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1272.922745] env[61978]: DEBUG nova.network.neutron [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Updating instance_info_cache with network_info: [{"id": "55da250a-bf6c-4f74-98c0-a25c3605a4df", "address": "fa:16:3e:19:31:fd", "network": {"id": "41fe5f05-f058-4b13-8bd8-778d98d892da", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-643498706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e163315a8674fb8835344dbdc454e36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "76f377cd-5966-49b4-9210-907f592c694e", "external-id": "nsx-vlan-transportzone-124", "segmentation_id": 124, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55da250a-bf", "ovs_interfaceid": "55da250a-bf6c-4f74-98c0-a25c3605a4df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1273.130817] env[61978]: DEBUG nova.scheduler.client.report [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1273.248444] env[61978]: DEBUG oslo_vmware.api [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395851, 'name': PowerOnVM_Task, 'duration_secs': 0.678767} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.249212] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1273.249212] env[61978]: DEBUG nova.compute.manager [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1273.249964] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3545f7b8-ef47-46c2-a32b-61566dee9b6b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.416386] env[61978]: INFO nova.compute.manager [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Swapping old allocation on dict_keys(['44209228-3464-48ae-bc40-83eccd44b0cf']) held by migration 984cd406-40bb-41ab-8948-b39be4277799 for instance [ 1273.416386] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc2d625c-d6bd-42e1-9b6e-4e87a1a79153 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "4d357d46-8bbb-4228-a5a6-2ce67fe037d7" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1273.416544] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc2d625c-d6bd-42e1-9b6e-4e87a1a79153 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "4d357d46-8bbb-4228-a5a6-2ce67fe037d7" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1273.416737] env[61978]: DEBUG nova.compute.manager [None req-cc2d625c-d6bd-42e1-9b6e-4e87a1a79153 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Going to confirm migration 7 {{(pid=61978) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1273.425107] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Releasing lock "refresh_cache-845ec88d-5d2b-479c-a2d1-fa235b2b87b3" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1273.425572] env[61978]: DEBUG nova.compute.manager [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Instance network_info: |[{"id": "55da250a-bf6c-4f74-98c0-a25c3605a4df", "address": "fa:16:3e:19:31:fd", "network": {"id": "41fe5f05-f058-4b13-8bd8-778d98d892da", 
"bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-643498706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e163315a8674fb8835344dbdc454e36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "76f377cd-5966-49b4-9210-907f592c694e", "external-id": "nsx-vlan-transportzone-124", "segmentation_id": 124, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55da250a-bf", "ovs_interfaceid": "55da250a-bf6c-4f74-98c0-a25c3605a4df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1273.426141] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:31:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '76f377cd-5966-49b4-9210-907f592c694e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55da250a-bf6c-4f74-98c0-a25c3605a4df', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1273.435689] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Creating folder: Project (6e163315a8674fb8835344dbdc454e36). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1273.439215] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0da83b03-ce1d-4e1c-be77-c0501b4f96ff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.453917] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Created folder: Project (6e163315a8674fb8835344dbdc454e36) in parent group-v295764. [ 1273.453917] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Creating folder: Instances. Parent ref: group-v296026. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1273.453917] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0a577cf8-617c-4b18-a8d4-796759d38068 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.455105] env[61978]: DEBUG nova.scheduler.client.report [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Overwriting current allocation {'allocations': {'44209228-3464-48ae-bc40-83eccd44b0cf': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 132}}, 'project_id': '2af733ffc4384fa1a2c59f4a45f1778c', 'user_id': '7026a28592af41ebb4dd7df6cfa33feb', 'consumer_generation': 1} on consumer d3c82821-0617-4de6-8109-813a67910ed1 {{(pid=61978) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1273.466500] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Created folder: Instances in parent group-v296026. [ 1273.466727] env[61978]: DEBUG oslo.service.loopingcall [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1273.467316] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1273.467316] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c8948db4-bf93-4a83-97ca-9dd5a2716f1b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.495099] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1273.495099] env[61978]: value = "task-1395854" [ 1273.495099] env[61978]: _type = "Task" [ 1273.495099] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.503698] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395854, 'name': CreateVM_Task} progress is 0%. 
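The scheduler.client.report record above ("Overwriting current allocation ... on consumer ...") is the Placement side of confirming the move: the allocation held by the migration UUID is written back onto the instance consumer. A minimal sketch of that write using plain requests against the standard Placement API; the payload shape is copied from the log record, the endpoint and microversion are the usual Placement ones:

import requests

def overwrite_allocation(placement_url, token, consumer_uuid, allocations,
                         project_id, user_id, consumer_generation):
    body = {
        "allocations": allocations,          # {rp_uuid: {"resources": {...}}}
        "project_id": project_id,
        "user_id": user_id,
        "consumer_generation": consumer_generation,
    }
    resp = requests.put(
        "%s/allocations/%s" % (placement_url, consumer_uuid),
        json=body,
        headers={
            "X-Auth-Token": token,
            # consumer_generation requires Placement microversion >= 1.28
            "OpenStack-API-Version": "placement 1.28",
        },
    )
    resp.raise_for_status()
    return resp

If the consumer_generation no longer matches, Placement rejects the write with a 409 conflict, which is why the generation from the earlier read (1 in the record above) is carried along.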
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.580865] env[61978]: DEBUG oslo_concurrency.lockutils [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "refresh_cache-d3c82821-0617-4de6-8109-813a67910ed1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1273.580865] env[61978]: DEBUG oslo_concurrency.lockutils [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired lock "refresh_cache-d3c82821-0617-4de6-8109-813a67910ed1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.580865] env[61978]: DEBUG nova.network.neutron [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1273.637713] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.779s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1273.641384] env[61978]: DEBUG oslo_concurrency.lockutils [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.333s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1273.641673] env[61978]: DEBUG nova.objects.instance [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Lazy-loading 'resources' on Instance uuid f9b57cf4-f2e4-4d2a-9bd4-74952d46876d {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1273.664951] env[61978]: INFO nova.scheduler.client.report [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Deleted allocations for instance 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9 [ 1273.710359] env[61978]: DEBUG nova.compute.manager [req-166e374f-6057-493a-9156-4f72464eed28 req-e344d214-f01d-42a6-be73-4c1efc20871c service nova] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Received event network-changed-55da250a-bf6c-4f74-98c0-a25c3605a4df {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1273.710359] env[61978]: DEBUG nova.compute.manager [req-166e374f-6057-493a-9156-4f72464eed28 req-e344d214-f01d-42a6-be73-4c1efc20871c service nova] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Refreshing instance network info cache due to event network-changed-55da250a-bf6c-4f74-98c0-a25c3605a4df. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1273.710359] env[61978]: DEBUG oslo_concurrency.lockutils [req-166e374f-6057-493a-9156-4f72464eed28 req-e344d214-f01d-42a6-be73-4c1efc20871c service nova] Acquiring lock "refresh_cache-845ec88d-5d2b-479c-a2d1-fa235b2b87b3" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1273.710359] env[61978]: DEBUG oslo_concurrency.lockutils [req-166e374f-6057-493a-9156-4f72464eed28 req-e344d214-f01d-42a6-be73-4c1efc20871c service nova] Acquired lock "refresh_cache-845ec88d-5d2b-479c-a2d1-fa235b2b87b3" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.710359] env[61978]: DEBUG nova.network.neutron [req-166e374f-6057-493a-9156-4f72464eed28 req-e344d214-f01d-42a6-be73-4c1efc20871c service nova] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Refreshing network info cache for port 55da250a-bf6c-4f74-98c0-a25c3605a4df {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1273.769725] env[61978]: DEBUG oslo_concurrency.lockutils [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1273.988730] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc2d625c-d6bd-42e1-9b6e-4e87a1a79153 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "refresh_cache-4d357d46-8bbb-4228-a5a6-2ce67fe037d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1273.988960] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc2d625c-d6bd-42e1-9b6e-4e87a1a79153 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "refresh_cache-4d357d46-8bbb-4228-a5a6-2ce67fe037d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.989173] env[61978]: DEBUG nova.network.neutron [None req-cc2d625c-d6bd-42e1-9b6e-4e87a1a79153 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1273.989377] env[61978]: DEBUG nova.objects.instance [None req-cc2d625c-d6bd-42e1-9b6e-4e87a1a79153 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lazy-loading 'info_cache' on Instance uuid 4d357d46-8bbb-4228-a5a6-2ce67fe037d7 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1274.006060] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395854, 'name': CreateVM_Task, 'duration_secs': 0.365665} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.006210] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1274.006831] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1274.007038] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1274.007365] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1274.007628] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f099487-adce-4120-af57-aecbabe146c3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.012627] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Waiting for the task: (returnval){ [ 1274.012627] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]529d666a-233a-e704-3daa-0ba67ed5a8db" [ 1274.012627] env[61978]: _type = "Task" [ 1274.012627] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.020239] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]529d666a-233a-e704-3daa-0ba67ed5a8db, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.172788] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fae083bf-41e7-43a1-b9ae-241665b681b0 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.169s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.360718] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce8a062-77ab-4a76-96f7-8056a84f7581 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.368226] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd4d0c3-205d-49d9-a945-16b90d4a52be {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.400020] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c0e962d-0be7-40b1-b476-733851ee8392 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.407707] env[61978]: DEBUG nova.network.neutron [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updating instance_info_cache with network_info: [{"id": "d2d39b09-4acd-4f24-aa07-31e86f78f134", "address": "fa:16:3e:3b:95:21", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2d39b09-4a", "ovs_interfaceid": "d2d39b09-4acd-4f24-aa07-31e86f78f134", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1274.415076] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e5f2163-3e51-49cd-965d-dc6f6e510c49 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.432624] env[61978]: DEBUG nova.compute.provider_tree [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Inventory has not 
changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1274.444382] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquiring lock "764fdf3c-a6ce-4cd6-9190-d2d43fded0fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1274.448136] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "764fdf3c-a6ce-4cd6-9190-d2d43fded0fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1274.522505] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]529d666a-233a-e704-3daa-0ba67ed5a8db, 'name': SearchDatastore_Task, 'duration_secs': 0.016043} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.522847] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1274.523609] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1274.523609] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1274.523609] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1274.523794] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 
tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1274.524238] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c108ecf-03ee-49e4-91bb-75945eec4976 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.538564] env[61978]: DEBUG nova.network.neutron [req-166e374f-6057-493a-9156-4f72464eed28 req-e344d214-f01d-42a6-be73-4c1efc20871c service nova] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Updated VIF entry in instance network info cache for port 55da250a-bf6c-4f74-98c0-a25c3605a4df. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1274.538929] env[61978]: DEBUG nova.network.neutron [req-166e374f-6057-493a-9156-4f72464eed28 req-e344d214-f01d-42a6-be73-4c1efc20871c service nova] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Updating instance_info_cache with network_info: [{"id": "55da250a-bf6c-4f74-98c0-a25c3605a4df", "address": "fa:16:3e:19:31:fd", "network": {"id": "41fe5f05-f058-4b13-8bd8-778d98d892da", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-643498706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e163315a8674fb8835344dbdc454e36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "76f377cd-5966-49b4-9210-907f592c694e", "external-id": "nsx-vlan-transportzone-124", "segmentation_id": 124, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55da250a-bf", "ovs_interfaceid": "55da250a-bf6c-4f74-98c0-a25c3605a4df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1274.541384] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1274.541476] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1274.543353] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab718baf-6bf8-4757-9c81-0d31d1ca62b5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.548205] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Waiting for the task: (returnval){ [ 1274.548205] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5205c8df-398d-bf7f-ef2c-6c39830be420" [ 1274.548205] env[61978]: _type = "Task" [ 1274.548205] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.556461] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5205c8df-398d-bf7f-ef2c-6c39830be420, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.911139] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Acquiring lock "de8abe58-e0c2-4eaf-b3a6-7106e0861080" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1274.911684] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Lock "de8abe58-e0c2-4eaf-b3a6-7106e0861080" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1274.912045] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Acquiring lock "de8abe58-e0c2-4eaf-b3a6-7106e0861080-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1274.912385] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Lock "de8abe58-e0c2-4eaf-b3a6-7106e0861080-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1274.913807] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Lock "de8abe58-e0c2-4eaf-b3a6-7106e0861080-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.919948] env[61978]: INFO nova.compute.manager [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Terminating instance [ 1274.921460] env[61978]: DEBUG oslo_concurrency.lockutils [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lock "refresh_cache-d3c82821-0617-4de6-8109-813a67910ed1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1274.922134] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Acquiring lock "refresh_cache-de8abe58-e0c2-4eaf-b3a6-7106e0861080" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1274.922339] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Acquired lock "refresh_cache-de8abe58-e0c2-4eaf-b3a6-7106e0861080" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1274.923321] env[61978]: DEBUG nova.network.neutron [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1274.924966] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7165bc52-ad99-468e-bb04-57b30518234c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.933438] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b546f90-6021-49b8-b963-b122c25399ed {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.937792] env[61978]: DEBUG nova.scheduler.client.report [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1274.946145] env[61978]: DEBUG nova.compute.manager [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1275.043608] env[61978]: DEBUG oslo_concurrency.lockutils [req-166e374f-6057-493a-9156-4f72464eed28 req-e344d214-f01d-42a6-be73-4c1efc20871c service nova] Releasing lock "refresh_cache-845ec88d-5d2b-479c-a2d1-fa235b2b87b3" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1275.059051] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5205c8df-398d-bf7f-ef2c-6c39830be420, 'name': SearchDatastore_Task, 'duration_secs': 0.03641} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.059848] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-469fe620-1df2-46b2-9e09-95125a55baf2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.068471] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Waiting for the task: (returnval){ [ 1275.068471] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5270fb02-d8cc-d80c-2a34-33a35eaa874b" [ 1275.068471] env[61978]: _type = "Task" [ 1275.068471] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.082675] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5270fb02-d8cc-d80c-2a34-33a35eaa874b, 'name': SearchDatastore_Task, 'duration_secs': 0.010629} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.083149] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1275.083813] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 845ec88d-5d2b-479c-a2d1-fa235b2b87b3/845ec88d-5d2b-479c-a2d1-fa235b2b87b3.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1275.084245] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-88150f7a-a716-4393-b334-4c74d82af2d5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.101022] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Waiting for the task: (returnval){ [ 1275.101022] env[61978]: value = "task-1395855" [ 1275.101022] env[61978]: _type = "Task" [ 1275.101022] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.108872] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Task: {'id': task-1395855, 'name': CopyVirtualDisk_Task} progress is 0%. 
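The vm_util records above capture the root-disk provisioning sequence for instance 845ec88d: copy the cached image vmdk from devstack-image-cache_base into the instance directory, then extend the copy to the flavor's root-disk size (the 1048576 in the later record reads as KB for a 1 GiB root disk; the unit is an assumption here). A sketch of that sequence, assuming a hypothetical disk_api helper in place of the real CopyVirtualDisk_Task / ExtendVirtualDisk_Task calls:

def provision_root_disk(disk_api, image_id, instance_uuid, size_kb,
                        datastore="datastore2",
                        cache_dir="devstack-image-cache_base"):
    # disk_api is a hypothetical helper exposing copy_virtual_disk() and
    # extend_virtual_disk(); the real driver runs CopyVirtualDisk_Task and
    # ExtendVirtualDisk_Task through the vCenter VirtualDiskManager.
    src = "[%s] %s/%s/%s.vmdk" % (datastore, cache_dir, image_id, image_id)
    dst = "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)
    disk_api.copy_virtual_disk(src, dst)         # copy out of the image cache
    disk_api.extend_virtual_disk(dst, size_kb)   # grow to the flavor root size
    return dst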
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.331886] env[61978]: DEBUG nova.network.neutron [None req-cc2d625c-d6bd-42e1-9b6e-4e87a1a79153 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Updating instance_info_cache with network_info: [{"id": "69d57c29-bde4-4e04-8f75-f8f4e410d10b", "address": "fa:16:3e:6b:47:c3", "network": {"id": "69a878ea-ec7d-4793-9c48-f0f5d454a6fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1711420523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d95ebcafdca43b8a1636e21c7258803", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69d57c29-bd", "ovs_interfaceid": "69d57c29-bde4-4e04-8f75-f8f4e410d10b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1275.443322] env[61978]: DEBUG oslo_concurrency.lockutils [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.803s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1275.448431] env[61978]: DEBUG oslo_concurrency.lockutils [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.682s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1275.450273] env[61978]: INFO nova.compute.claims [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1275.460593] env[61978]: DEBUG nova.network.neutron [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1275.484200] env[61978]: INFO nova.scheduler.client.report [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Deleted allocations for instance f9b57cf4-f2e4-4d2a-9bd4-74952d46876d [ 1275.500400] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1275.561478] env[61978]: DEBUG nova.network.neutron [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1275.610738] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Task: {'id': task-1395855, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472717} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.611116] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 845ec88d-5d2b-479c-a2d1-fa235b2b87b3/845ec88d-5d2b-479c-a2d1-fa235b2b87b3.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1275.611409] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1275.611725] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d277286d-89c8-416a-a180-3a70fda87e64 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.617927] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Waiting for the task: (returnval){ [ 1275.617927] env[61978]: value = "task-1395856" [ 1275.617927] env[61978]: _type = "Task" [ 1275.617927] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.628715] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Task: {'id': task-1395856, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.836314] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc2d625c-d6bd-42e1-9b6e-4e87a1a79153 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "refresh_cache-4d357d46-8bbb-4228-a5a6-2ce67fe037d7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1275.836314] env[61978]: DEBUG nova.objects.instance [None req-cc2d625c-d6bd-42e1-9b6e-4e87a1a79153 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lazy-loading 'migration_context' on Instance uuid 4d357d46-8bbb-4228-a5a6-2ce67fe037d7 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1275.997023] env[61978]: DEBUG oslo_concurrency.lockutils [None req-79355095-a54b-456b-9e4a-7f2276a6de28 tempest-ServerAddressesNegativeTestJSON-670689881 tempest-ServerAddressesNegativeTestJSON-670689881-project-member] Lock "f9b57cf4-f2e4-4d2a-9bd4-74952d46876d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.793s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1276.053185] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1276.053185] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0045898f-4150-46ff-b5c3-9e9e4d6b395d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.060035] env[61978]: DEBUG oslo_vmware.api [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1276.060035] env[61978]: value = "task-1395857" [ 1276.060035] env[61978]: _type = "Task" [ 1276.060035] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.063682] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Releasing lock "refresh_cache-de8abe58-e0c2-4eaf-b3a6-7106e0861080" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1276.064200] env[61978]: DEBUG nova.compute.manager [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1276.064312] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1276.065569] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5692d21-ef0a-4840-ae05-bec5a45101e5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.073279] env[61978]: DEBUG oslo_vmware.api [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395857, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.076064] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1276.076064] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9dfd9aca-3728-4621-a6d8-b931425a98d0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.082075] env[61978]: DEBUG oslo_vmware.api [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for the task: (returnval){ [ 1276.082075] env[61978]: value = "task-1395858" [ 1276.082075] env[61978]: _type = "Task" [ 1276.082075] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.091195] env[61978]: DEBUG oslo_vmware.api [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395858, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.127257] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Task: {'id': task-1395856, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08267} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.127523] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1276.128343] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae5ca49c-6aba-45c6-ac60-8163d2430e8b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.150824] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] 845ec88d-5d2b-479c-a2d1-fa235b2b87b3/845ec88d-5d2b-479c-a2d1-fa235b2b87b3.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1276.151545] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0de19c24-118b-46ed-a991-cb0326d2664c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.171618] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Waiting for the task: (returnval){ [ 1276.171618] env[61978]: value = "task-1395859" [ 1276.171618] env[61978]: _type = "Task" [ 1276.171618] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.181596] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Task: {'id': task-1395859, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.341447] env[61978]: DEBUG nova.objects.base [None req-cc2d625c-d6bd-42e1-9b6e-4e87a1a79153 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Object Instance<4d357d46-8bbb-4228-a5a6-2ce67fe037d7> lazy-loaded attributes: info_cache,migration_context {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1276.342592] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abea2561-9d34-462c-9313-d49ae8b27377 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.364451] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50cf2af9-01a9-4c38-81bd-24e9a819a2e4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.370583] env[61978]: DEBUG oslo_vmware.api [None req-cc2d625c-d6bd-42e1-9b6e-4e87a1a79153 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1276.370583] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e2dee0-bcaf-7af3-e3cb-ee3c0e29ebb4" [ 1276.370583] env[61978]: _type = "Task" [ 1276.370583] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.378842] env[61978]: DEBUG oslo_vmware.api [None req-cc2d625c-d6bd-42e1-9b6e-4e87a1a79153 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e2dee0-bcaf-7af3-e3cb-ee3c0e29ebb4, 'name': SearchDatastore_Task, 'duration_secs': 0.006204} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.379127] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc2d625c-d6bd-42e1-9b6e-4e87a1a79153 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.571976] env[61978]: DEBUG oslo_vmware.api [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395857, 'name': PowerOffVM_Task, 'duration_secs': 0.375584} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.572238] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1276.572911] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1276.573134] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1276.573294] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1276.573477] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1276.573651] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1276.573749] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1276.573963] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1276.574148] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 
tempest-ServerActionsTestOtherB-375006338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1276.574325] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1276.574492] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1276.574710] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1276.579890] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-47579970-d6ae-46e5-b224-72cd8adeee35 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.603224] env[61978]: DEBUG oslo_vmware.api [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395858, 'name': PowerOffVM_Task, 'duration_secs': 0.183388} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.604875] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1276.604875] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1276.605113] env[61978]: DEBUG oslo_vmware.api [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1276.605113] env[61978]: value = "task-1395860" [ 1276.605113] env[61978]: _type = "Task" [ 1276.605113] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.605548] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c03a017-8bc9-4d46-85f0-8474b0cde5cf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.622023] env[61978]: DEBUG oslo_vmware.api [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395860, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.640667] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1276.640856] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1276.641065] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Deleting the datastore file [datastore2] de8abe58-e0c2-4eaf-b3a6-7106e0861080 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1276.641335] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30c708ac-be1e-41ee-9022-8aa7b30a418b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.647482] env[61978]: DEBUG oslo_vmware.api [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for the task: (returnval){ [ 1276.647482] env[61978]: value = "task-1395862" [ 1276.647482] env[61978]: _type = "Task" [ 1276.647482] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.658398] env[61978]: DEBUG oslo_vmware.api [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395862, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.663664] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3f06d4-5e0a-4877-8d1b-a1a003fe91ab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.671065] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c06028-4ab5-442c-8429-cda958f0c275 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.709127] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "7823099f-efdf-46bf-85d7-69e105dfb02c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.709404] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "7823099f-efdf-46bf-85d7-69e105dfb02c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1276.710810] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Task: {'id': task-1395859, 'name': ReconfigVM_Task, 'duration_secs': 0.270153} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.711736] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d4b8cc7-7452-4767-b837-e94dbe4ecd70 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.714523] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Reconfigured VM instance instance-00000061 to attach disk [datastore2] 845ec88d-5d2b-479c-a2d1-fa235b2b87b3/845ec88d-5d2b-479c-a2d1-fa235b2b87b3.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1276.715159] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4f764884-d9ad-4a3a-9e40-2b56bdbf49c8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.722584] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e3fde9-1e2f-4df3-abc3-e782d9c107b9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.726642] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Waiting for the task: (returnval){ [ 1276.726642] env[61978]: value = "task-1395863" [ 1276.726642] env[61978]: _type = "Task" [ 1276.726642] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.738957] env[61978]: DEBUG nova.compute.provider_tree [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1276.745091] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Task: {'id': task-1395863, 'name': Rename_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.118763] env[61978]: DEBUG oslo_vmware.api [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395860, 'name': ReconfigVM_Task, 'duration_secs': 0.240223} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.119574] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87884a2c-b49b-4e3e-b1fa-fb1cb26a16db {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.149789] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1277.149789] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1277.149789] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1277.149789] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1277.149789] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1277.149789] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1277.149789] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1277.149789] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1277.149789] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1277.149789] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1277.149789] env[61978]: DEBUG nova.virt.hardware [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1277.149789] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7aec45f-c32a-47ba-8c72-c0d2a14517de {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.158282] env[61978]: DEBUG oslo_vmware.api [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1277.158282] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]522bb27e-0ecf-109e-fb9e-9ac7da688657" [ 1277.158282] env[61978]: _type = "Task" [ 1277.158282] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.161550] env[61978]: DEBUG oslo_vmware.api [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Task: {'id': task-1395862, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129329} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.164515] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1277.164613] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1277.164795] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1277.164978] env[61978]: INFO nova.compute.manager [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1277.165322] env[61978]: DEBUG oslo.service.loopingcall [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1277.165462] env[61978]: DEBUG nova.compute.manager [-] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1277.165558] env[61978]: DEBUG nova.network.neutron [-] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1277.173644] env[61978]: DEBUG oslo_vmware.api [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522bb27e-0ecf-109e-fb9e-9ac7da688657, 'name': SearchDatastore_Task, 'duration_secs': 0.006316} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.177585] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Reconfiguring VM instance instance-00000050 to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1277.177928] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-290a5306-ff35-4578-935b-5f64a5b305bc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.192762] env[61978]: DEBUG nova.network.neutron [-] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1277.201551] env[61978]: DEBUG oslo_vmware.api [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1277.201551] env[61978]: value = "task-1395864" [ 1277.201551] env[61978]: _type = "Task" [ 1277.201551] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.211016] env[61978]: DEBUG oslo_vmware.api [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395864, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.211438] env[61978]: DEBUG nova.compute.manager [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1277.237819] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Task: {'id': task-1395863, 'name': Rename_Task, 'duration_secs': 0.1504} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.238135] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1277.238391] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c7d850f-60fb-48d0-a3f5-d1d65f99cb45 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.242849] env[61978]: DEBUG nova.scheduler.client.report [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1277.246519] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Waiting for the task: (returnval){ [ 1277.246519] env[61978]: value = "task-1395865" [ 1277.246519] env[61978]: _type = "Task" [ 1277.246519] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.254918] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Task: {'id': task-1395865, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.695206] env[61978]: DEBUG nova.network.neutron [-] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.717353] env[61978]: DEBUG oslo_vmware.api [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395864, 'name': ReconfigVM_Task, 'duration_secs': 0.268883} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.719516] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Reconfigured VM instance instance-00000050 to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1277.720787] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6242e43-d883-4731-b145-8638835da211 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.747913] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] d3c82821-0617-4de6-8109-813a67910ed1/d3c82821-0617-4de6-8109-813a67910ed1.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1277.749117] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1277.749868] env[61978]: DEBUG oslo_concurrency.lockutils [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.302s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.750247] env[61978]: DEBUG nova.compute.manager [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1277.752871] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c0d68f6-6222-4299-8621-ce40c957878f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.766536] env[61978]: DEBUG oslo_concurrency.lockutils [None req-292d58cb-bcae-4c7d-bfeb-875b69951466 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 5.540s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.781175] env[61978]: DEBUG oslo_vmware.api [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Task: {'id': task-1395865, 'name': PowerOnVM_Task, 'duration_secs': 0.435428} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.782463] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1277.782545] env[61978]: INFO nova.compute.manager [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Took 6.72 seconds to spawn the instance on the hypervisor. [ 1277.782769] env[61978]: DEBUG nova.compute.manager [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1277.783525] env[61978]: DEBUG oslo_vmware.api [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1277.783525] env[61978]: value = "task-1395866" [ 1277.783525] env[61978]: _type = "Task" [ 1277.783525] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.784741] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5899adc9-04d1-42d3-99c2-6e237bc98807 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.800291] env[61978]: DEBUG oslo_vmware.api [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395866, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.199418] env[61978]: INFO nova.compute.manager [-] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Took 1.03 seconds to deallocate network for instance. [ 1278.267914] env[61978]: DEBUG nova.compute.utils [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1278.269224] env[61978]: DEBUG nova.compute.manager [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1278.269414] env[61978]: DEBUG nova.network.neutron [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1278.298032] env[61978]: DEBUG oslo_vmware.api [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395866, 'name': ReconfigVM_Task, 'duration_secs': 0.476062} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.298664] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Reconfigured VM instance instance-00000050 to attach disk [datastore2] d3c82821-0617-4de6-8109-813a67910ed1/d3c82821-0617-4de6-8109-813a67910ed1.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1278.299571] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92426c2-0019-4dae-b5cc-3973fd13ce5d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.331118] env[61978]: DEBUG nova.policy [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ab697d6ab4e4ece8b290afbf5ec1366', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a33ac41ae0247b59c400c6ed9145239', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1278.335058] env[61978]: INFO nova.compute.manager [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Took 19.45 seconds to build instance. 
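[annotation] The wait_for_task / _poll_task entries above (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, DeleteDatastoreFile_Task) all follow the same invoke-then-poll pattern: a *_Task method is invoked against vCenter, then the session polls the task until it reports success or error, logging "progress is N%" on each pass. The sketch below is only a minimal illustration of that loop under assumed helper names (poll_task, get_task_info, TaskTimedOut are hypothetical); it is not the oslo.vmware implementation.

    # Minimal sketch of the "invoke task, then poll until done" pattern seen in
    # the wait_for_task / _poll_task log entries above. All names here are
    # hypothetical stand-ins, not the oslo.vmware API.
    import time

    class TaskTimedOut(Exception):
        """Raised when the task does not finish within the allowed time."""

    def poll_task(get_task_info, task_ref, interval=0.5, timeout=300):
        """Poll a vCenter-style task until it succeeds, fails, or times out.

        get_task_info(task_ref) is assumed to return an object with
        .state in {'queued', 'running', 'success', 'error'}, .progress (int),
        and .error (an exception instance or None).
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info                      # e.g. CopyVirtualDisk_Task completed successfully
            if info.state == 'error':
                raise info.error                 # surface the vCenter fault to the caller
            # Still queued/running: report progress (0%, 10%, ...) and retry.
            print("Task %s progress is %s%%" % (task_ref, info.progress))
            time.sleep(interval)
        raise TaskTimedOut(task_ref)

In the log, each "Waiting for the task: ... to complete" line marks the start of such a loop, and the matching "'duration_secs': ... completed successfully" line marks its normal exit.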
[ 1278.336498] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966cc24f-699c-4ff6-8471-e8171be2baf9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.390031] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61f66bc-ca4f-4dcc-ae1d-35d2f139a2bc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.390031] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a91eb5-ea80-41c4-b85a-070ca61bdbaf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.401043] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1278.401293] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b577557-3f06-4322-923a-d8c6d2a33b0d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.413673] env[61978]: DEBUG oslo_vmware.api [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1278.413673] env[61978]: value = "task-1395867" [ 1278.413673] env[61978]: _type = "Task" [ 1278.413673] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.419952] env[61978]: DEBUG oslo_vmware.api [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395867, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.526656] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-918b801a-a3e4-49e9-931a-9b57218c0e56 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.535032] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b499382-71b4-4dd2-a1d8-fe5214d24b64 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.570186] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c0e3f0-68c8-4a90-92a1-95538b65227b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.578570] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71181207-2ba0-4f19-b14b-020cddeb60da {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.593864] env[61978]: DEBUG nova.compute.provider_tree [None req-292d58cb-bcae-4c7d-bfeb-875b69951466 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1278.708113] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1278.720259] env[61978]: DEBUG nova.network.neutron [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Successfully created port: f394483a-0b84-4d01-aee1-a50c3a3ee0ff {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1278.772352] env[61978]: DEBUG nova.compute.manager [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1278.841866] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d684022f-c4bb-4430-9317-78d45a9d18f2 tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Lock "845ec88d-5d2b-479c-a2d1-fa235b2b87b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.964s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.924086] env[61978]: DEBUG oslo_vmware.api [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395867, 'name': PowerOnVM_Task, 'duration_secs': 0.461442} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.924394] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1279.097804] env[61978]: DEBUG nova.scheduler.client.report [None req-292d58cb-bcae-4c7d-bfeb-875b69951466 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1279.164978] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Acquiring lock "845ec88d-5d2b-479c-a2d1-fa235b2b87b3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1279.165272] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Lock "845ec88d-5d2b-479c-a2d1-fa235b2b87b3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1279.165491] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Acquiring lock "845ec88d-5d2b-479c-a2d1-fa235b2b87b3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1279.165691] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Lock "845ec88d-5d2b-479c-a2d1-fa235b2b87b3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1279.165873] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Lock "845ec88d-5d2b-479c-a2d1-fa235b2b87b3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1279.168211] env[61978]: INFO nova.compute.manager [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Terminating instance [ 1279.169994] env[61978]: DEBUG nova.compute.manager [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1279.170217] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1279.171051] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59eeb16-7bb9-4b1d-9169-0f783debbe99 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.178912] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1279.179155] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b50c538a-2ebf-4e26-b7e5-7c6b8f576b3b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.185288] env[61978]: DEBUG oslo_vmware.api [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Waiting for the task: (returnval){ [ 1279.185288] env[61978]: value = "task-1395868" [ 1279.185288] env[61978]: _type = "Task" [ 1279.185288] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.192476] env[61978]: DEBUG oslo_vmware.api [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Task: {'id': task-1395868, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.278120] env[61978]: INFO nova.virt.block_device [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Booting with volume 73762ddd-195c-421a-95df-d5230c3e7c5e at /dev/sda [ 1279.318509] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a0e7f1f-250b-49a6-9f89-f0e6d69426e4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.327650] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0784f000-4c03-4146-9293-60568b48794b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.354342] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f2796488-c84a-4c75-b835-6809fb30f329 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.364760] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f1b093-a7dc-4469-865b-b6b4c5cb13ae {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.396833] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8edc2f73-d472-457c-a272-0b872d099c91 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.404656] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a857bc1f-2954-491f-802e-85e085912762 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.417723] env[61978]: DEBUG nova.virt.block_device [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Updating existing volume attachment record: 6e9b3c0d-1e91-411d-ba20-a19a0ed68ec0 {{(pid=61978) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1279.695334] env[61978]: DEBUG oslo_vmware.api [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Task: {'id': task-1395868, 'name': PowerOffVM_Task, 'duration_secs': 0.217197} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.695642] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1279.695873] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1279.696158] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2768174c-eae9-4ca3-8ca3-d4f6e4be438a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.761359] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1279.761589] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1279.761781] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Deleting the datastore file [datastore2] 845ec88d-5d2b-479c-a2d1-fa235b2b87b3 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1279.762057] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2bdf8a4-2280-42c5-bb14-4c6d8734569a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.767891] env[61978]: DEBUG oslo_vmware.api [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Waiting for the task: (returnval){ [ 1279.767891] env[61978]: value = "task-1395870" [ 1279.767891] env[61978]: _type = "Task" [ 1279.767891] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.776185] env[61978]: DEBUG oslo_vmware.api [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Task: {'id': task-1395870, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.967746] env[61978]: INFO nova.compute.manager [None req-56b76fcd-00ae-4130-9251-b17f2f265bb4 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updating instance to original state: 'active' [ 1280.108016] env[61978]: DEBUG oslo_concurrency.lockutils [None req-292d58cb-bcae-4c7d-bfeb-875b69951466 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.341s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1280.110839] env[61978]: DEBUG oslo_concurrency.lockutils [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 6.342s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.111057] env[61978]: DEBUG nova.objects.instance [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61978) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1280.277967] env[61978]: DEBUG oslo_vmware.api [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Task: {'id': task-1395870, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14462} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.278259] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1280.278513] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1280.278631] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1280.278812] env[61978]: INFO nova.compute.manager [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 1280.279287] env[61978]: DEBUG oslo.service.loopingcall [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1280.279511] env[61978]: DEBUG nova.compute.manager [-] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1280.279612] env[61978]: DEBUG nova.network.neutron [-] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1280.388882] env[61978]: DEBUG nova.compute.manager [req-0ea9cea3-1972-4bf0-9830-ec374474709a req-da1dcba8-97c5-41fb-a528-80abd59b5fb5 service nova] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Received event network-vif-plugged-f394483a-0b84-4d01-aee1-a50c3a3ee0ff {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1280.389135] env[61978]: DEBUG oslo_concurrency.lockutils [req-0ea9cea3-1972-4bf0-9830-ec374474709a req-da1dcba8-97c5-41fb-a528-80abd59b5fb5 service nova] Acquiring lock "c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1280.389353] env[61978]: DEBUG oslo_concurrency.lockutils [req-0ea9cea3-1972-4bf0-9830-ec374474709a req-da1dcba8-97c5-41fb-a528-80abd59b5fb5 service nova] Lock "c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.389529] env[61978]: DEBUG oslo_concurrency.lockutils [req-0ea9cea3-1972-4bf0-9830-ec374474709a req-da1dcba8-97c5-41fb-a528-80abd59b5fb5 service nova] Lock "c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1280.389704] env[61978]: DEBUG nova.compute.manager [req-0ea9cea3-1972-4bf0-9830-ec374474709a req-da1dcba8-97c5-41fb-a528-80abd59b5fb5 service nova] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] No waiting events found dispatching network-vif-plugged-f394483a-0b84-4d01-aee1-a50c3a3ee0ff {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1280.389875] env[61978]: WARNING nova.compute.manager [req-0ea9cea3-1972-4bf0-9830-ec374474709a req-da1dcba8-97c5-41fb-a528-80abd59b5fb5 service nova] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Received unexpected event network-vif-plugged-f394483a-0b84-4d01-aee1-a50c3a3ee0ff for instance with vm_state building and task_state block_device_mapping. 
[ 1280.565619] env[61978]: DEBUG nova.network.neutron [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Successfully updated port: f394483a-0b84-4d01-aee1-a50c3a3ee0ff {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1280.685877] env[61978]: INFO nova.scheduler.client.report [None req-292d58cb-bcae-4c7d-bfeb-875b69951466 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Deleted allocation for migration e6b37eeb-7505-4485-ba8d-f4b9a4c26958 [ 1281.068278] env[61978]: DEBUG oslo_concurrency.lockutils [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "refresh_cache-c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1281.068354] env[61978]: DEBUG oslo_concurrency.lockutils [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired lock "refresh_cache-c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.068511] env[61978]: DEBUG nova.network.neutron [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1281.122368] env[61978]: DEBUG oslo_concurrency.lockutils [None req-644001a0-a365-42e7-bc49-60bb22f60585 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1281.124051] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.623s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1281.125172] env[61978]: INFO nova.compute.claims [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1281.168400] env[61978]: DEBUG nova.network.neutron [-] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1281.194070] env[61978]: DEBUG oslo_concurrency.lockutils [None req-292d58cb-bcae-4c7d-bfeb-875b69951466 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" 
:: held 12.380s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1281.354918] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "d3c82821-0617-4de6-8109-813a67910ed1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1281.355237] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "d3c82821-0617-4de6-8109-813a67910ed1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1281.355467] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "d3c82821-0617-4de6-8109-813a67910ed1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1281.355661] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "d3c82821-0617-4de6-8109-813a67910ed1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1281.355873] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "d3c82821-0617-4de6-8109-813a67910ed1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1281.358440] env[61978]: INFO nova.compute.manager [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Terminating instance [ 1281.360506] env[61978]: DEBUG nova.compute.manager [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1281.360732] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1281.360978] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf63c9ea-0fde-4c39-978b-bc1377e51135 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.367970] env[61978]: DEBUG oslo_vmware.api [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1281.367970] env[61978]: value = "task-1395871" [ 1281.367970] env[61978]: _type = "Task" [ 1281.367970] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.375752] env[61978]: DEBUG oslo_vmware.api [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395871, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.500568] env[61978]: DEBUG nova.compute.manager [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1281.501096] env[61978]: DEBUG nova.virt.hardware [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1281.501334] env[61978]: DEBUG nova.virt.hardware [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1281.501560] env[61978]: DEBUG nova.virt.hardware [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1281.501784] env[61978]: DEBUG nova.virt.hardware [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1281.501908] env[61978]: DEBUG nova.virt.hardware [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1281.502076] env[61978]: DEBUG nova.virt.hardware [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1281.502298] env[61978]: DEBUG nova.virt.hardware [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1281.502464] env[61978]: DEBUG nova.virt.hardware [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1281.502641] env[61978]: DEBUG nova.virt.hardware [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Got 1 possible topologies {{(pid=61978) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1281.502813] env[61978]: DEBUG nova.virt.hardware [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1281.502994] env[61978]: DEBUG nova.virt.hardware [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1281.503898] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d86bc7-4379-4519-96a8-3265e96a8b1b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.515804] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98297c0c-5b84-4d67-8305-e318e5f3b728 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.599219] env[61978]: DEBUG nova.network.neutron [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1281.671569] env[61978]: INFO nova.compute.manager [-] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Took 1.39 seconds to deallocate network for instance. 
[ 1281.736800] env[61978]: DEBUG nova.network.neutron [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Updating instance_info_cache with network_info: [{"id": "f394483a-0b84-4d01-aee1-a50c3a3ee0ff", "address": "fa:16:3e:de:15:87", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf394483a-0b", "ovs_interfaceid": "f394483a-0b84-4d01-aee1-a50c3a3ee0ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1281.877996] env[61978]: DEBUG oslo_vmware.api [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395871, 'name': PowerOffVM_Task, 'duration_secs': 0.206218} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.878375] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1281.878619] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Volume detach. 
Driver type: vmdk {{(pid=61978) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1281.878826] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296004', 'volume_id': 'fe3a2276-228e-421f-80d6-1ae89c15e505', 'name': 'volume-fe3a2276-228e-421f-80d6-1ae89c15e505', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'd3c82821-0617-4de6-8109-813a67910ed1', 'attached_at': '2024-11-04T15:10:38.000000', 'detached_at': '', 'volume_id': 'fe3a2276-228e-421f-80d6-1ae89c15e505', 'serial': 'fe3a2276-228e-421f-80d6-1ae89c15e505'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1281.879621] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa0dc3d-dd6c-410b-ac6a-99a20a6441ea {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.899865] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec79118-8b95-4c77-b69a-899611e05264 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.905805] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73495d4c-6906-4e01-9d6e-a6149a209f9c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.925049] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed8efeb-b56c-41dc-8d26-7c70e05e12a0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.938715] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] The volume has not been displaced from its original location: [datastore2] volume-fe3a2276-228e-421f-80d6-1ae89c15e505/volume-fe3a2276-228e-421f-80d6-1ae89c15e505.vmdk. No consolidation needed. 
{{(pid=61978) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1281.943892] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Reconfiguring VM instance instance-00000050 to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1281.944158] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89e7f732-7eab-4149-bd0a-ff46e839fb75 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.961333] env[61978]: DEBUG oslo_vmware.api [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1281.961333] env[61978]: value = "task-1395872" [ 1281.961333] env[61978]: _type = "Task" [ 1281.961333] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.968246] env[61978]: DEBUG oslo_vmware.api [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395872, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.015628] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1282.015909] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1282.016156] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1282.016358] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1282.016539] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 
tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.018873] env[61978]: INFO nova.compute.manager [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Terminating instance [ 1282.021922] env[61978]: DEBUG nova.compute.manager [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1282.022147] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1282.022960] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593dff45-6560-4f83-9326-9d63837cd7ea {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.030059] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1282.030285] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-417319a9-e19b-4109-bedf-78a999659914 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.036747] env[61978]: DEBUG oslo_vmware.api [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1282.036747] env[61978]: value = "task-1395873" [ 1282.036747] env[61978]: _type = "Task" [ 1282.036747] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.044501] env[61978]: DEBUG oslo_vmware.api [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395873, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.178784] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1282.239696] env[61978]: DEBUG oslo_concurrency.lockutils [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Releasing lock "refresh_cache-c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1282.240015] env[61978]: DEBUG nova.compute.manager [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Instance network_info: |[{"id": "f394483a-0b84-4d01-aee1-a50c3a3ee0ff", "address": "fa:16:3e:de:15:87", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf394483a-0b", "ovs_interfaceid": "f394483a-0b84-4d01-aee1-a50c3a3ee0ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1282.241030] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:15:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f394483a-0b84-4d01-aee1-a50c3a3ee0ff', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1282.248131] env[61978]: DEBUG oslo.service.loopingcall [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1282.250568] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1282.250989] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b0fde74b-f2f8-44fb-bade-e606bc1e6e2b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.274040] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1282.274040] env[61978]: value = "task-1395874" [ 1282.274040] env[61978]: _type = "Task" [ 1282.274040] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.284735] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395874, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.296830] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681bcb24-47c1-4aa2-829f-1600900ecebf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.303736] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53ca151-f066-4b23-96d0-e8310eb85f65 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.334739] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2b57cb-1aba-4609-abd7-3ff7d9276106 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.341869] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c62fcda-c419-4130-a1a2-46af66a5719c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.354718] env[61978]: DEBUG nova.compute.provider_tree [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1282.418415] env[61978]: DEBUG nova.compute.manager [req-c7535042-50b7-463a-a716-906a00156b54 req-2482e1de-a678-49af-8f0b-2fa031dbea16 service nova] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Received event network-changed-f394483a-0b84-4d01-aee1-a50c3a3ee0ff {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1282.418653] env[61978]: DEBUG nova.compute.manager [req-c7535042-50b7-463a-a716-906a00156b54 req-2482e1de-a678-49af-8f0b-2fa031dbea16 service nova] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Refreshing instance network info cache due to event network-changed-f394483a-0b84-4d01-aee1-a50c3a3ee0ff. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1282.418847] env[61978]: DEBUG oslo_concurrency.lockutils [req-c7535042-50b7-463a-a716-906a00156b54 req-2482e1de-a678-49af-8f0b-2fa031dbea16 service nova] Acquiring lock "refresh_cache-c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1282.418994] env[61978]: DEBUG oslo_concurrency.lockutils [req-c7535042-50b7-463a-a716-906a00156b54 req-2482e1de-a678-49af-8f0b-2fa031dbea16 service nova] Acquired lock "refresh_cache-c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.419181] env[61978]: DEBUG nova.network.neutron [req-c7535042-50b7-463a-a716-906a00156b54 req-2482e1de-a678-49af-8f0b-2fa031dbea16 service nova] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Refreshing network info cache for port f394483a-0b84-4d01-aee1-a50c3a3ee0ff {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1282.471075] env[61978]: DEBUG oslo_vmware.api [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395872, 'name': ReconfigVM_Task, 'duration_secs': 0.210972} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.471075] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Reconfigured VM instance instance-00000050 to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1282.475516] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73c95926-1950-4933-acc4-470570ac9d66 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.490944] env[61978]: DEBUG oslo_vmware.api [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1282.490944] env[61978]: value = "task-1395875" [ 1282.490944] env[61978]: _type = "Task" [ 1282.490944] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.499058] env[61978]: DEBUG oslo_vmware.api [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395875, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.548223] env[61978]: DEBUG oslo_vmware.api [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395873, 'name': PowerOffVM_Task, 'duration_secs': 0.205076} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.549069] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1282.549069] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1282.549239] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a469c78-b087-4aa8-bb0f-2916c9e4b6c3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.613462] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1282.613781] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1282.614016] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Deleting the datastore file [datastore1] 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1282.614293] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8bb96bf1-5d33-4b91-b08b-3653afcb5cd7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.621404] env[61978]: DEBUG oslo_vmware.api [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1282.621404] env[61978]: value = "task-1395877" [ 1282.621404] env[61978]: _type = "Task" [ 1282.621404] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.629463] env[61978]: DEBUG oslo_vmware.api [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395877, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.783887] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395874, 'name': CreateVM_Task, 'duration_secs': 0.403305} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.784082] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1282.784791] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sda', 'disk_bus': None, 'device_type': None, 'attachment_id': '6e9b3c0d-1e91-411d-ba20-a19a0ed68ec0', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296021', 'volume_id': '73762ddd-195c-421a-95df-d5230c3e7c5e', 'name': 'volume-73762ddd-195c-421a-95df-d5230c3e7c5e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9', 'attached_at': '', 'detached_at': '', 'volume_id': '73762ddd-195c-421a-95df-d5230c3e7c5e', 'serial': '73762ddd-195c-421a-95df-d5230c3e7c5e'}, 'boot_index': 0, 'guest_format': None, 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=61978) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1282.785035] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Root volume attach. Driver type: vmdk {{(pid=61978) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1282.785789] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f5d95bf-7a33-4338-bb4a-e3046b360065 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.793029] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82718da-d268-4842-9c48-2e9844f16e9b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.798638] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3223bd-dcb4-4b46-9585-a8c2a447de90 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.804574] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-7b644fc6-58b4-423a-b0d9-ea6290ce0cec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.815674] env[61978]: DEBUG oslo_vmware.api [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1282.815674] env[61978]: value = "task-1395878" [ 1282.815674] env[61978]: _type = "Task" [ 1282.815674] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.822998] env[61978]: DEBUG oslo_vmware.api [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395878, 'name': RelocateVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.858092] env[61978]: DEBUG nova.scheduler.client.report [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1283.000556] env[61978]: DEBUG oslo_vmware.api [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395875, 'name': ReconfigVM_Task, 'duration_secs': 0.153173} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.002683] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296004', 'volume_id': 'fe3a2276-228e-421f-80d6-1ae89c15e505', 'name': 'volume-fe3a2276-228e-421f-80d6-1ae89c15e505', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'd3c82821-0617-4de6-8109-813a67910ed1', 'attached_at': '2024-11-04T15:10:38.000000', 'detached_at': '', 'volume_id': 'fe3a2276-228e-421f-80d6-1ae89c15e505', 'serial': 'fe3a2276-228e-421f-80d6-1ae89c15e505'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1283.002976] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1283.003756] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7c4ebf-aa97-44af-897e-166e65dd7f7a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.010545] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1283.010786] env[61978]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.UnregisterVM with opID=oslo.vmware-05386470-2fc9-42e9-baa9-0ea4dd9ee6ec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.075864] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1283.076143] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1283.076356] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Deleting the datastore file [datastore2] d3c82821-0617-4de6-8109-813a67910ed1 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1283.076651] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-089e1a9c-3e27-42a4-a93a-6674924293a2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.083958] env[61978]: DEBUG oslo_vmware.api [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1283.083958] env[61978]: value = "task-1395880" [ 1283.083958] env[61978]: _type = "Task" [ 1283.083958] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.094493] env[61978]: DEBUG oslo_vmware.api [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395880, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.130895] env[61978]: DEBUG oslo_vmware.api [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395877, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184262} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.131772] env[61978]: DEBUG nova.network.neutron [req-c7535042-50b7-463a-a716-906a00156b54 req-2482e1de-a678-49af-8f0b-2fa031dbea16 service nova] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Updated VIF entry in instance network info cache for port f394483a-0b84-4d01-aee1-a50c3a3ee0ff. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1283.132120] env[61978]: DEBUG nova.network.neutron [req-c7535042-50b7-463a-a716-906a00156b54 req-2482e1de-a678-49af-8f0b-2fa031dbea16 service nova] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Updating instance_info_cache with network_info: [{"id": "f394483a-0b84-4d01-aee1-a50c3a3ee0ff", "address": "fa:16:3e:de:15:87", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf394483a-0b", "ovs_interfaceid": "f394483a-0b84-4d01-aee1-a50c3a3ee0ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.133290] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1283.133636] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1283.133836] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1283.134893] env[61978]: INFO nova.compute.manager [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1283.134893] env[61978]: DEBUG oslo.service.loopingcall [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1283.134893] env[61978]: DEBUG nova.compute.manager [-] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1283.134893] env[61978]: DEBUG nova.network.neutron [-] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1283.328228] env[61978]: DEBUG oslo_vmware.api [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395878, 'name': RelocateVM_Task, 'duration_secs': 0.422561} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.328711] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Volume attach. Driver type: vmdk {{(pid=61978) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1283.329089] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296021', 'volume_id': '73762ddd-195c-421a-95df-d5230c3e7c5e', 'name': 'volume-73762ddd-195c-421a-95df-d5230c3e7c5e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9', 'attached_at': '', 'detached_at': '', 'volume_id': '73762ddd-195c-421a-95df-d5230c3e7c5e', 'serial': '73762ddd-195c-421a-95df-d5230c3e7c5e'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1283.330324] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8457d4-6ee1-4507-b593-58f091babd32 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.351502] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-430179dd-d837-4487-99c0-43af815727a6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.366623] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.243s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.367205] env[61978]: DEBUG nova.compute.manager [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1283.377910] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] volume-73762ddd-195c-421a-95df-d5230c3e7c5e/volume-73762ddd-195c-421a-95df-d5230c3e7c5e.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1283.378463] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc2d625c-d6bd-42e1-9b6e-4e87a1a79153 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 6.999s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.379766] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b8093f5-85ed-46a2-8f2c-4823d931a86a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.401846] env[61978]: DEBUG oslo_vmware.api [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1283.401846] env[61978]: value = "task-1395881" [ 1283.401846] env[61978]: _type = "Task" [ 1283.401846] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.412251] env[61978]: DEBUG oslo_vmware.api [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395881, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.593394] env[61978]: DEBUG oslo_vmware.api [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395880, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.210461} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.593692] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1283.593759] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1283.593892] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1283.594090] env[61978]: INFO nova.compute.manager [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Took 2.23 seconds to destroy the instance on the hypervisor. [ 1283.594342] env[61978]: DEBUG oslo.service.loopingcall [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1283.594539] env[61978]: DEBUG nova.compute.manager [-] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1283.594636] env[61978]: DEBUG nova.network.neutron [-] [instance: d3c82821-0617-4de6-8109-813a67910ed1] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1283.635508] env[61978]: DEBUG oslo_concurrency.lockutils [req-c7535042-50b7-463a-a716-906a00156b54 req-2482e1de-a678-49af-8f0b-2fa031dbea16 service nova] Releasing lock "refresh_cache-c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1283.635779] env[61978]: DEBUG nova.compute.manager [req-c7535042-50b7-463a-a716-906a00156b54 req-2482e1de-a678-49af-8f0b-2fa031dbea16 service nova] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Received event network-vif-deleted-55da250a-bf6c-4f74-98c0-a25c3605a4df {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1283.881961] env[61978]: DEBUG nova.compute.utils [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1283.882973] env[61978]: DEBUG nova.compute.manager [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1283.882973] env[61978]: DEBUG nova.network.neutron [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1283.913630] env[61978]: DEBUG oslo_vmware.api [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395881, 'name': ReconfigVM_Task, 'duration_secs': 0.503113} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.913630] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Reconfigured VM instance instance-00000062 to attach disk [datastore2] volume-73762ddd-195c-421a-95df-d5230c3e7c5e/volume-73762ddd-195c-421a-95df-d5230c3e7c5e.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1283.920084] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d916e4c2-b13a-4e46-9337-8e5306514dcb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.937017] env[61978]: DEBUG oslo_vmware.api [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1283.937017] env[61978]: value = "task-1395882" [ 1283.937017] env[61978]: _type = "Task" [ 1283.937017] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.944488] env[61978]: DEBUG oslo_vmware.api [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395882, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.947543] env[61978]: DEBUG nova.policy [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '83e49dfbe8d44d23a25c9dba3d2eeb50', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f4975f37c081466ab85cf1c21b750c10', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1284.054052] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a000e0-ba8f-4853-a654-050436c3171a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.061629] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6503ee69-c2a5-477a-9a4c-5627c896c95a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.099757] env[61978]: DEBUG nova.network.neutron [-] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1284.103404] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4095cde-e71e-4a15-a4fc-1dd3e8ed6388 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1284.112190] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba9b4403-8f92-4d87-973d-575a64504966 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.131902] env[61978]: DEBUG nova.compute.provider_tree [None req-cc2d625c-d6bd-42e1-9b6e-4e87a1a79153 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1284.300702] env[61978]: DEBUG nova.network.neutron [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Successfully created port: b790409d-8e9e-4942-9855-0974decac463 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1284.386357] env[61978]: DEBUG nova.compute.manager [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1284.447958] env[61978]: DEBUG oslo_vmware.api [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395882, 'name': ReconfigVM_Task, 'duration_secs': 0.141483} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.447958] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296021', 'volume_id': '73762ddd-195c-421a-95df-d5230c3e7c5e', 'name': 'volume-73762ddd-195c-421a-95df-d5230c3e7c5e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9', 'attached_at': '', 'detached_at': '', 'volume_id': '73762ddd-195c-421a-95df-d5230c3e7c5e', 'serial': '73762ddd-195c-421a-95df-d5230c3e7c5e'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1284.447958] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ceb805d6-4803-457f-b265-c712bd77f609 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.464274] env[61978]: DEBUG oslo_vmware.api [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1284.464274] env[61978]: value = "task-1395883" [ 1284.464274] env[61978]: _type = "Task" [ 1284.464274] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.473184] env[61978]: DEBUG oslo_vmware.api [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395883, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.506082] env[61978]: DEBUG nova.compute.manager [req-a4d8ef55-1c7c-4813-b767-831686852cb3 req-8d596906-5ded-4699-80cb-aa3874b83b41 service nova] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Received event network-vif-deleted-7417d7e9-723d-408d-bfa4-e583af757e79 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1284.506330] env[61978]: DEBUG nova.compute.manager [req-a4d8ef55-1c7c-4813-b767-831686852cb3 req-8d596906-5ded-4699-80cb-aa3874b83b41 service nova] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Received event network-vif-deleted-d2d39b09-4acd-4f24-aa07-31e86f78f134 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1284.506507] env[61978]: INFO nova.compute.manager [req-a4d8ef55-1c7c-4813-b767-831686852cb3 req-8d596906-5ded-4699-80cb-aa3874b83b41 service nova] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Neutron deleted interface d2d39b09-4acd-4f24-aa07-31e86f78f134; detaching it from the instance and deleting it from the info cache [ 1284.506694] env[61978]: DEBUG nova.network.neutron [req-a4d8ef55-1c7c-4813-b767-831686852cb3 req-8d596906-5ded-4699-80cb-aa3874b83b41 service nova] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1284.603942] env[61978]: INFO nova.compute.manager [-] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Took 1.47 seconds to deallocate network for instance. [ 1284.608549] env[61978]: DEBUG nova.network.neutron [-] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1284.636780] env[61978]: DEBUG nova.scheduler.client.report [None req-cc2d625c-d6bd-42e1-9b6e-4e87a1a79153 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1284.974455] env[61978]: DEBUG oslo_vmware.api [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395883, 'name': Rename_Task, 'duration_secs': 0.158414} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.974774] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1284.974958] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f9ef214-5bb6-451f-915d-40bbf979cb34 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.982089] env[61978]: DEBUG oslo_vmware.api [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1284.982089] env[61978]: value = "task-1395884" [ 1284.982089] env[61978]: _type = "Task" [ 1284.982089] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.993518] env[61978]: DEBUG oslo_vmware.api [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395884, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.011726] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-194d9e69-b041-4cf7-81a9-a18133d7b0e8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.020327] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adbfe0ab-02c2-40c7-8ee7-9773731c3be9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.047100] env[61978]: DEBUG nova.compute.manager [req-a4d8ef55-1c7c-4813-b767-831686852cb3 req-8d596906-5ded-4699-80cb-aa3874b83b41 service nova] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Detach interface failed, port_id=d2d39b09-4acd-4f24-aa07-31e86f78f134, reason: Instance d3c82821-0617-4de6-8109-813a67910ed1 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1285.111127] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1285.111652] env[61978]: INFO nova.compute.manager [-] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Took 1.52 seconds to deallocate network for instance. [ 1285.395235] env[61978]: DEBUG nova.compute.manager [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1285.428055] env[61978]: DEBUG nova.virt.hardware [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1285.428450] env[61978]: DEBUG nova.virt.hardware [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1285.428706] env[61978]: DEBUG nova.virt.hardware [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1285.429012] env[61978]: DEBUG nova.virt.hardware [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1285.429300] env[61978]: DEBUG nova.virt.hardware [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1285.429547] env[61978]: DEBUG nova.virt.hardware [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1285.429867] env[61978]: DEBUG nova.virt.hardware [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1285.430143] env[61978]: DEBUG nova.virt.hardware [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1285.430418] env[61978]: DEBUG 
nova.virt.hardware [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1285.430687] env[61978]: DEBUG nova.virt.hardware [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1285.430963] env[61978]: DEBUG nova.virt.hardware [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1285.432257] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d4a5e74-8771-4460-a385-932343cdf4ea {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.441663] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a10edc-eaec-41ae-92b7-8b0759136f1e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.493988] env[61978]: DEBUG oslo_vmware.api [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395884, 'name': PowerOnVM_Task, 'duration_secs': 0.435727} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.494311] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1285.494530] env[61978]: INFO nova.compute.manager [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Took 3.99 seconds to spawn the instance on the hypervisor. 
[ 1285.494716] env[61978]: DEBUG nova.compute.manager [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1285.495504] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1f84e5-a09e-4a57-8b6c-37908b29b8e0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.645150] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc2d625c-d6bd-42e1-9b6e-4e87a1a79153 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.266s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1285.648097] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.899s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1285.649922] env[61978]: INFO nova.compute.claims [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1285.658475] env[61978]: INFO nova.compute.manager [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Took 0.55 seconds to detach 1 volumes for instance. [ 1285.803766] env[61978]: DEBUG nova.network.neutron [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Successfully updated port: b790409d-8e9e-4942-9855-0974decac463 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1286.017924] env[61978]: INFO nova.compute.manager [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Took 17.28 seconds to build instance. 
[ 1286.164854] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1286.208518] env[61978]: INFO nova.scheduler.client.report [None req-cc2d625c-d6bd-42e1-9b6e-4e87a1a79153 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Deleted allocation for migration 31398f15-05ee-49eb-8ce0-4d60c312ca83 [ 1286.308326] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquiring lock "refresh_cache-764fdf3c-a6ce-4cd6-9190-d2d43fded0fa" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1286.308482] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquired lock "refresh_cache-764fdf3c-a6ce-4cd6-9190-d2d43fded0fa" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.308638] env[61978]: DEBUG nova.network.neutron [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1286.521648] env[61978]: DEBUG oslo_concurrency.lockutils [None req-53ed2cce-5ff3-4423-b35d-50778a8d3c2f tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.791s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1286.544174] env[61978]: DEBUG nova.compute.manager [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Received event network-vif-plugged-b790409d-8e9e-4942-9855-0974decac463 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1286.544430] env[61978]: DEBUG oslo_concurrency.lockutils [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] Acquiring lock "764fdf3c-a6ce-4cd6-9190-d2d43fded0fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1286.544643] env[61978]: DEBUG oslo_concurrency.lockutils [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] Lock "764fdf3c-a6ce-4cd6-9190-d2d43fded0fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1286.544830] env[61978]: DEBUG oslo_concurrency.lockutils 
[req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] Lock "764fdf3c-a6ce-4cd6-9190-d2d43fded0fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1286.545014] env[61978]: DEBUG nova.compute.manager [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] No waiting events found dispatching network-vif-plugged-b790409d-8e9e-4942-9855-0974decac463 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1286.545188] env[61978]: WARNING nova.compute.manager [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Received unexpected event network-vif-plugged-b790409d-8e9e-4942-9855-0974decac463 for instance with vm_state building and task_state spawning. [ 1286.545353] env[61978]: DEBUG nova.compute.manager [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Received event network-changed-b790409d-8e9e-4942-9855-0974decac463 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1286.545511] env[61978]: DEBUG nova.compute.manager [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Refreshing instance network info cache due to event network-changed-b790409d-8e9e-4942-9855-0974decac463. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1286.545681] env[61978]: DEBUG oslo_concurrency.lockutils [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] Acquiring lock "refresh_cache-764fdf3c-a6ce-4cd6-9190-d2d43fded0fa" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1286.714180] env[61978]: DEBUG oslo_concurrency.lockutils [None req-cc2d625c-d6bd-42e1-9b6e-4e87a1a79153 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "4d357d46-8bbb-4228-a5a6-2ce67fe037d7" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 13.297s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1286.803736] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6cc46b-44ad-4cc4-9238-9d14787ee857 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.812971] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-098b6a5f-a2bd-4b2e-8b73-d86bbdeee37f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.849290] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b4be89-cc19-413e-97c5-9ee1948bfdac {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.857305] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-fac066e3-68c4-4ff4-beb7-dea87d201cf4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.861894] env[61978]: DEBUG nova.network.neutron [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1286.873643] env[61978]: DEBUG nova.compute.provider_tree [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1287.012878] env[61978]: DEBUG nova.network.neutron [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Updating instance_info_cache with network_info: [{"id": "b790409d-8e9e-4942-9855-0974decac463", "address": "fa:16:3e:5c:ae:b5", "network": {"id": "cf176714-7dd5-4d40-8f6d-7e46444187ca", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1909576930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4975f37c081466ab85cf1c21b750c10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb790409d-8e", "ovs_interfaceid": "b790409d-8e9e-4942-9855-0974decac463", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1287.376983] env[61978]: DEBUG nova.scheduler.client.report [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1287.517603] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Releasing lock "refresh_cache-764fdf3c-a6ce-4cd6-9190-d2d43fded0fa" {{(pid=61978) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1287.517603] env[61978]: DEBUG nova.compute.manager [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Instance network_info: |[{"id": "b790409d-8e9e-4942-9855-0974decac463", "address": "fa:16:3e:5c:ae:b5", "network": {"id": "cf176714-7dd5-4d40-8f6d-7e46444187ca", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1909576930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4975f37c081466ab85cf1c21b750c10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb790409d-8e", "ovs_interfaceid": "b790409d-8e9e-4942-9855-0974decac463", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1287.517603] env[61978]: DEBUG oslo_concurrency.lockutils [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] Acquired lock "refresh_cache-764fdf3c-a6ce-4cd6-9190-d2d43fded0fa" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1287.517603] env[61978]: DEBUG nova.network.neutron [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Refreshing network info cache for port b790409d-8e9e-4942-9855-0974decac463 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1287.517603] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:ae:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db068f71-08cc-42d4-8ab6-17134c1585e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b790409d-8e9e-4942-9855-0974decac463', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1287.526085] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Creating folder: Project (f4975f37c081466ab85cf1c21b750c10). Parent ref: group-v295764. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1287.527078] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e94325cc-b9ae-40a4-b716-2b0dc7a7a613 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.545300] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Created folder: Project (f4975f37c081466ab85cf1c21b750c10) in parent group-v295764. [ 1287.545776] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Creating folder: Instances. Parent ref: group-v296030. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1287.546097] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-139b6f2d-f5ef-4541-89cf-e94f84241fa8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.555853] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Created folder: Instances in parent group-v296030. [ 1287.556201] env[61978]: DEBUG oslo.service.loopingcall [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1287.556448] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1287.556688] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21d05fde-9a11-461f-a59c-5608f484860a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.576390] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1287.576390] env[61978]: value = "task-1395887" [ 1287.576390] env[61978]: _type = "Task" [ 1287.576390] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.584623] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395887, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.854444] env[61978]: DEBUG nova.compute.manager [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Stashing vm_state: active {{(pid=61978) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1287.881550] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.233s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1287.882086] env[61978]: DEBUG nova.compute.manager [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1287.884792] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.177s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1287.885153] env[61978]: DEBUG nova.objects.instance [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Lazy-loading 'resources' on Instance uuid de8abe58-e0c2-4eaf-b3a6-7106e0861080 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1288.086840] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395887, 'name': CreateVM_Task, 'duration_secs': 0.342982} completed successfully. 
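The CreateVM_Task entries above show oslo.vmware's usual invoke-then-poll pattern: the SOAP call returns a task reference immediately, and wait_for_task polls it (the "progress is 0%" lines) until it finishes. A minimal sketch of that pattern, assuming the caller already holds an oslo_vmware.api.VMwareAPISession plus the folder, config-spec and resource-pool references (all names below are placeholders):

    def create_vm_and_wait(session, vm_folder_ref, config_spec, respool_ref):
        # Issue the SOAP call; vCenter returns the task's ManagedObjectReference
        # right away instead of blocking until the VM exists.
        task_ref = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder_ref,
                                      config=config_spec, pool=respool_ref)
        # Poll the task at the session's task_poll_interval until it reaches
        # 'success' (returning the task info) or raises on an error state.
        return session.wait_for_task(task_ref)
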
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.087009] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1288.087812] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1288.088017] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1288.088359] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1288.088624] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e29c7a0-be3a-41b7-83a6-c96ea1350fdb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.093185] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for the task: (returnval){ [ 1288.093185] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]522582b4-bad8-b12e-1b77-1f1cef1be7be" [ 1288.093185] env[61978]: _type = "Task" [ 1288.093185] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.101597] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522582b4-bad8-b12e-1b77-1f1cef1be7be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.253748] env[61978]: DEBUG nova.network.neutron [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Updated VIF entry in instance network info cache for port b790409d-8e9e-4942-9855-0974decac463. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1288.253948] env[61978]: DEBUG nova.network.neutron [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Updating instance_info_cache with network_info: [{"id": "b790409d-8e9e-4942-9855-0974decac463", "address": "fa:16:3e:5c:ae:b5", "network": {"id": "cf176714-7dd5-4d40-8f6d-7e46444187ca", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1909576930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4975f37c081466ab85cf1c21b750c10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb790409d-8e", "ovs_interfaceid": "b790409d-8e9e-4942-9855-0974decac463", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1288.301324] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "4d357d46-8bbb-4228-a5a6-2ce67fe037d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.301653] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "4d357d46-8bbb-4228-a5a6-2ce67fe037d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.301907] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "4d357d46-8bbb-4228-a5a6-2ce67fe037d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.302125] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "4d357d46-8bbb-4228-a5a6-2ce67fe037d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.302309] env[61978]: DEBUG oslo_concurrency.lockutils [None 
req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "4d357d46-8bbb-4228-a5a6-2ce67fe037d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1288.304795] env[61978]: INFO nova.compute.manager [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Terminating instance [ 1288.306730] env[61978]: DEBUG nova.compute.manager [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1288.306971] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1288.307838] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbe19110-5bca-4ae2-bb4d-8f020753d0a9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.315658] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1288.315905] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6da280a3-2027-4b43-9e5b-8322b178c66f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.322978] env[61978]: DEBUG oslo_vmware.api [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1288.322978] env[61978]: value = "task-1395888" [ 1288.322978] env[61978]: _type = "Task" [ 1288.322978] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.332322] env[61978]: DEBUG oslo_vmware.api [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395888, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.374989] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.388707] env[61978]: DEBUG nova.compute.utils [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1288.390426] env[61978]: DEBUG nova.compute.manager [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1288.390643] env[61978]: DEBUG nova.network.neutron [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1288.434624] env[61978]: DEBUG nova.policy [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '346f982562de48fab1702aca567113ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3a4f29a959447159b2f7d194ea94873', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1288.561228] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2925618-9595-46b3-b611-8d4e696e905d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.569918] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad340cbd-4188-41e5-b838-2a3ba3c4acf8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.601834] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd807561-8ff1-4f57-ba70-52fabb42b7ea {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.609799] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522582b4-bad8-b12e-1b77-1f1cef1be7be, 'name': SearchDatastore_Task, 'duration_secs': 0.010111} completed successfully. 
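The lockutils entries scattered through this trace ("Acquiring lock ... by ...", "acquired ... waited Ns", "released ... held Ns") are emitted by oslo.concurrency's named-lock helpers, which record how long a caller waited for and then held each lock. A minimal sketch of the two forms used here, with the lock names ("compute_resources", "refresh_cache-<uuid>") taken from the log purely as examples:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance):
        # Runs only while the named lock is held; entry and exit produce the
        # "acquired ... waited" / "released ... held" pairs seen above.
        ...

    def refresh_network_cache(instance_uuid):
        # The same helper also works as a context manager, e.g. for the
        # per-instance refresh_cache locks.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # re-fetch and store the instance's network info here
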
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.611831] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1288.612094] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1288.612344] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1288.612498] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1288.612680] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1288.612974] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-712bc2fa-d842-447b-9432-1ba96e4d9d00 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.615532] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88cab9c6-1979-4026-8ae5-7656168d40a4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.629021] env[61978]: DEBUG nova.compute.provider_tree [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1288.632013] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1288.632216] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 
tempest-SecurityGroupsTestJSON-1434142361-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1288.632903] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e355f090-de6d-4302-8681-dc01560ef016 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.638999] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for the task: (returnval){ [ 1288.638999] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]520d0fe5-988d-3d2b-b4f1-dfc65963066f" [ 1288.638999] env[61978]: _type = "Task" [ 1288.638999] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.647907] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520d0fe5-988d-3d2b-b4f1-dfc65963066f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.718169] env[61978]: DEBUG nova.network.neutron [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Successfully created port: 377707c6-c569-41b4-b460-d4ffd83a8c03 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1288.757403] env[61978]: DEBUG oslo_concurrency.lockutils [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] Releasing lock "refresh_cache-764fdf3c-a6ce-4cd6-9190-d2d43fded0fa" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1288.757680] env[61978]: DEBUG nova.compute.manager [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Received event network-changed-2daa968c-ac9c-4f15-ad2b-7977f5581ef1 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1288.757952] env[61978]: DEBUG nova.compute.manager [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Refreshing instance network info cache due to event network-changed-2daa968c-ac9c-4f15-ad2b-7977f5581ef1. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1288.758319] env[61978]: DEBUG oslo_concurrency.lockutils [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] Acquiring lock "refresh_cache-243e7146-46fc-43f4-a83b-cdc58f397f9e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1288.758492] env[61978]: DEBUG oslo_concurrency.lockutils [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] Acquired lock "refresh_cache-243e7146-46fc-43f4-a83b-cdc58f397f9e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1288.758677] env[61978]: DEBUG nova.network.neutron [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Refreshing network info cache for port 2daa968c-ac9c-4f15-ad2b-7977f5581ef1 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1288.833720] env[61978]: DEBUG oslo_vmware.api [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395888, 'name': PowerOffVM_Task, 'duration_secs': 0.226357} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.834063] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1288.834248] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1288.835063] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e23343ed-ceb3-489a-8a34-1d9d596c5e2c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.843147] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.843403] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.895702] env[61978]: DEBUG nova.compute.manager [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 
tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1288.902686] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1288.902686] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1288.902686] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Deleting the datastore file [datastore2] 4d357d46-8bbb-4228-a5a6-2ce67fe037d7 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1288.902686] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fddbd308-04e1-45ec-a90f-e210003cb2f3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.908549] env[61978]: DEBUG oslo_vmware.api [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1288.908549] env[61978]: value = "task-1395890" [ 1288.908549] env[61978]: _type = "Task" [ 1288.908549] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.917315] env[61978]: DEBUG oslo_vmware.api [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395890, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.132648] env[61978]: DEBUG nova.scheduler.client.report [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1289.149805] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520d0fe5-988d-3d2b-b4f1-dfc65963066f, 'name': SearchDatastore_Task, 'duration_secs': 0.009504} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.151258] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e991ecca-4e1b-4eb1-9a03-6cc2d7e531c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.156822] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for the task: (returnval){ [ 1289.156822] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a58502-0e64-62fb-8d59-695b5095fa44" [ 1289.156822] env[61978]: _type = "Task" [ 1289.156822] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.166112] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a58502-0e64-62fb-8d59-695b5095fa44, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.345824] env[61978]: DEBUG nova.compute.manager [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1289.417809] env[61978]: DEBUG oslo_vmware.api [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395890, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.467968} completed successfully. 
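The inventory dictionary logged just above is what the resource tracker reports to placement; the usable amount of each resource class works out to (total - reserved) * allocation_ratio. A small worked check against the values shown for provider 44209228-3464-48ae-bc40-83eccd44b0cf:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(rc, capacity)   # VCPU 192, MEMORY_MB 196078, DISK_GB 400
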
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.418112] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1289.418307] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1289.418927] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1289.418927] env[61978]: INFO nova.compute.manager [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1289.418927] env[61978]: DEBUG oslo.service.loopingcall [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1289.419128] env[61978]: DEBUG nova.compute.manager [-] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1289.419258] env[61978]: DEBUG nova.network.neutron [-] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1289.531429] env[61978]: DEBUG nova.network.neutron [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Updated VIF entry in instance network info cache for port 2daa968c-ac9c-4f15-ad2b-7977f5581ef1. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1289.531817] env[61978]: DEBUG nova.network.neutron [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Updating instance_info_cache with network_info: [{"id": "2daa968c-ac9c-4f15-ad2b-7977f5581ef1", "address": "fa:16:3e:0c:ea:2e", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2daa968c-ac", "ovs_interfaceid": "2daa968c-ac9c-4f15-ad2b-7977f5581ef1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1289.638875] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.754s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1289.641309] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.463s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1289.641548] env[61978]: DEBUG nova.objects.instance [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Lazy-loading 'resources' on Instance uuid 845ec88d-5d2b-479c-a2d1-fa235b2b87b3 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1289.661392] env[61978]: INFO nova.scheduler.client.report [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Deleted allocations for instance de8abe58-e0c2-4eaf-b3a6-7106e0861080 [ 1289.668818] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a58502-0e64-62fb-8d59-695b5095fa44, 'name': SearchDatastore_Task, 'duration_secs': 0.010139} completed successfully. 
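The instance_info_cache payloads printed in this trace are JSON-serializable lists of VIF dictionaries, so the fields of interest (port id, MAC, fixed IPs, MTU, NSX segmentation id) can be pulled out with plain dict access. A minimal sketch using a trimmed copy of the entry for port 2daa968c-ac9c-4f15-ad2b-7977f5581ef1 above as sample data:

    network_info = [{
        "id": "2daa968c-ac9c-4f15-ad2b-7977f5581ef1",
        "address": "fa:16:3e:0c:ea:2e",
        "network": {
            "subnets": [{"cidr": "192.168.128.0/28",
                         "ips": [{"address": "192.168.128.12", "type": "fixed"}]}],
            "meta": {"mtu": 8950},
        },
        "details": {"segmentation_id": 187},
        "devname": "tap2daa968c-ac",
    }]

    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"] if ip["type"] == "fixed"]
        print(vif["id"], vif["address"], fixed_ips,
              vif["network"]["meta"]["mtu"], vif["details"]["segmentation_id"])
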
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.669388] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1289.669646] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa/764fdf3c-a6ce-4cd6-9190-d2d43fded0fa.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1289.671123] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5908633a-d0b6-44be-ab71-857bc2fe5367 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.680622] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for the task: (returnval){ [ 1289.680622] env[61978]: value = "task-1395891" [ 1289.680622] env[61978]: _type = "Task" [ 1289.680622] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.690465] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395891, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.718590] env[61978]: DEBUG nova.compute.manager [req-36296bbf-7f7d-421d-a997-7cb69311f9e5 req-2b0ade4e-3057-444d-8eb4-a600dd14051a service nova] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Received event network-vif-deleted-69d57c29-bde4-4e04-8f75-f8f4e410d10b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1289.718802] env[61978]: INFO nova.compute.manager [req-36296bbf-7f7d-421d-a997-7cb69311f9e5 req-2b0ade4e-3057-444d-8eb4-a600dd14051a service nova] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Neutron deleted interface 69d57c29-bde4-4e04-8f75-f8f4e410d10b; detaching it from the instance and deleting it from the info cache [ 1289.718979] env[61978]: DEBUG nova.network.neutron [req-36296bbf-7f7d-421d-a997-7cb69311f9e5 req-2b0ade4e-3057-444d-8eb4-a600dd14051a service nova] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1289.865119] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1289.907923] env[61978]: DEBUG nova.compute.manager [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1289.938277] env[61978]: DEBUG nova.virt.hardware [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1289.938617] env[61978]: DEBUG nova.virt.hardware [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1289.938811] env[61978]: DEBUG nova.virt.hardware [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1289.939080] env[61978]: DEBUG nova.virt.hardware [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1289.939293] env[61978]: DEBUG nova.virt.hardware [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1289.939487] env[61978]: DEBUG nova.virt.hardware [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1289.939712] env[61978]: DEBUG nova.virt.hardware [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1289.939879] env[61978]: DEBUG nova.virt.hardware [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1289.940075] env[61978]: DEBUG 
nova.virt.hardware [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1289.940255] env[61978]: DEBUG nova.virt.hardware [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1289.940437] env[61978]: DEBUG nova.virt.hardware [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1289.941353] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c2e673-b35c-4147-b6a6-752ef7f680aa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.950128] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30206090-1fed-49e2-94ee-845105806e81 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.034974] env[61978]: DEBUG oslo_concurrency.lockutils [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] Releasing lock "refresh_cache-243e7146-46fc-43f4-a83b-cdc58f397f9e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1290.035381] env[61978]: DEBUG nova.compute.manager [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Received event network-changed-f394483a-0b84-4d01-aee1-a50c3a3ee0ff {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1290.035618] env[61978]: DEBUG nova.compute.manager [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Refreshing instance network info cache due to event network-changed-f394483a-0b84-4d01-aee1-a50c3a3ee0ff. 
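The nova.virt.hardware lines above reduce this flavor (1 vCPU, all topology limits left at 65536) to the single possible topology of 1 socket x 1 core x 1 thread. The "possible topologies" step amounts to enumerating (sockets, cores, threads) splits whose product equals the vCPU count while respecting the limits; a rough illustrative sketch of that idea (not Nova's actual implementation):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Yield every (sockets, cores, threads) split whose product is exactly
        # the vCPU count and which stays within the per-dimension limits.
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    yield (sockets, cores, threads)

    print(list(possible_topologies(1)))   # [(1, 1, 1)], matching the log above
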
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1290.035884] env[61978]: DEBUG oslo_concurrency.lockutils [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] Acquiring lock "refresh_cache-c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1290.036119] env[61978]: DEBUG oslo_concurrency.lockutils [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] Acquired lock "refresh_cache-c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1290.036335] env[61978]: DEBUG nova.network.neutron [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Refreshing network info cache for port f394483a-0b84-4d01-aee1-a50c3a3ee0ff {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1290.173111] env[61978]: DEBUG oslo_concurrency.lockutils [None req-80608833-8ca3-45f4-980d-7d912ff646c6 tempest-ServerShowV254Test-1219610676 tempest-ServerShowV254Test-1219610676-project-member] Lock "de8abe58-e0c2-4eaf-b3a6-7106e0861080" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.261s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1290.201567] env[61978]: DEBUG nova.network.neutron [-] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1290.203158] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395891, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514038} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.205962] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa/764fdf3c-a6ce-4cd6-9190-d2d43fded0fa.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1290.206158] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1290.206620] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-38ae26f9-91c8-491a-a1f2-97b9b00b0ce3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.215011] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for the task: (returnval){ [ 1290.215011] env[61978]: value = "task-1395892" [ 1290.215011] env[61978]: _type = "Task" [ 1290.215011] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.221222] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be4d5117-2cc3-4398-bbb9-8dc99cf6a212 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.225849] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395892, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.235722] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6345547-cf28-4857-a3cd-a50781940980 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.270709] env[61978]: DEBUG nova.compute.manager [req-36296bbf-7f7d-421d-a997-7cb69311f9e5 req-2b0ade4e-3057-444d-8eb4-a600dd14051a service nova] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Detach interface failed, port_id=69d57c29-bde4-4e04-8f75-f8f4e410d10b, reason: Instance 4d357d46-8bbb-4228-a5a6-2ce67fe037d7 could not be found. 
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1290.272941] env[61978]: DEBUG nova.network.neutron [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Successfully updated port: 377707c6-c569-41b4-b460-d4ffd83a8c03 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1290.343534] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edbdf46f-c546-423c-ba5a-d8055e659950 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.351318] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e50828-a923-43fe-87d8-df3fd756ff03 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.381064] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8819c14c-77c4-406d-8876-49deaa79e521 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.391055] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0792aea9-35d3-47a5-9654-875f865349ac {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.403557] env[61978]: DEBUG nova.compute.provider_tree [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1290.704319] env[61978]: INFO nova.compute.manager [-] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Took 1.28 seconds to deallocate network for instance. [ 1290.724338] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395892, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080789} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.724625] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1290.725427] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-859dab90-cfd8-46b7-9ce3-2b9e6e95c81f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.746474] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa/764fdf3c-a6ce-4cd6-9190-d2d43fded0fa.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1290.747415] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f58ab8aa-0cfc-4a31-a809-f88dedb3456a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.761800] env[61978]: DEBUG nova.network.neutron [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Updated VIF entry in instance network info cache for port f394483a-0b84-4d01-aee1-a50c3a3ee0ff. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1290.762184] env[61978]: DEBUG nova.network.neutron [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Updating instance_info_cache with network_info: [{"id": "f394483a-0b84-4d01-aee1-a50c3a3ee0ff", "address": "fa:16:3e:de:15:87", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf394483a-0b", "ovs_interfaceid": "f394483a-0b84-4d01-aee1-a50c3a3ee0ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1290.771663] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for the task: (returnval){ [ 1290.771663] env[61978]: value = "task-1395893" [ 1290.771663] env[61978]: _type = "Task" [ 1290.771663] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.780015] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1290.780176] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1290.780325] env[61978]: DEBUG nova.network.neutron [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1290.781326] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395893, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.906390] env[61978]: DEBUG nova.scheduler.client.report [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1291.210669] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.266618] env[61978]: DEBUG oslo_concurrency.lockutils [req-a5530985-cfd3-4c47-9f45-2f42495401c8 req-e2141963-df32-475a-9dbb-dff69e691470 service nova] Releasing lock "refresh_cache-c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1291.281953] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395893, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.315612] env[61978]: DEBUG nova.network.neutron [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1291.411865] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.770s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.414120] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.303s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.414404] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.416246] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.252s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.416445] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.418856] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 3.044s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.439335] env[61978]: INFO nova.scheduler.client.report [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Deleted allocations for instance 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c [ 1291.441259] env[61978]: INFO nova.scheduler.client.report [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] 
Deleted allocations for instance 845ec88d-5d2b-479c-a2d1-fa235b2b87b3 [ 1291.452874] env[61978]: INFO nova.scheduler.client.report [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Deleted allocations for instance d3c82821-0617-4de6-8109-813a67910ed1 [ 1291.463949] env[61978]: DEBUG nova.network.neutron [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Updating instance_info_cache with network_info: [{"id": "377707c6-c569-41b4-b460-d4ffd83a8c03", "address": "fa:16:3e:38:98:1d", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap377707c6-c5", "ovs_interfaceid": "377707c6-c569-41b4-b460-d4ffd83a8c03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1291.746615] env[61978]: DEBUG nova.compute.manager [req-e4a69e84-47ec-4a20-8769-cb41fe6011c1 req-d5b2fc2c-137a-42dc-9995-43dadde275da service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Received event network-vif-plugged-377707c6-c569-41b4-b460-d4ffd83a8c03 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1291.746889] env[61978]: DEBUG oslo_concurrency.lockutils [req-e4a69e84-47ec-4a20-8769-cb41fe6011c1 req-d5b2fc2c-137a-42dc-9995-43dadde275da service nova] Acquiring lock "7823099f-efdf-46bf-85d7-69e105dfb02c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.747272] env[61978]: DEBUG oslo_concurrency.lockutils [req-e4a69e84-47ec-4a20-8769-cb41fe6011c1 req-d5b2fc2c-137a-42dc-9995-43dadde275da service nova] Lock "7823099f-efdf-46bf-85d7-69e105dfb02c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.747476] env[61978]: DEBUG oslo_concurrency.lockutils [req-e4a69e84-47ec-4a20-8769-cb41fe6011c1 req-d5b2fc2c-137a-42dc-9995-43dadde275da service nova] Lock "7823099f-efdf-46bf-85d7-69e105dfb02c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.747658] env[61978]: DEBUG 
nova.compute.manager [req-e4a69e84-47ec-4a20-8769-cb41fe6011c1 req-d5b2fc2c-137a-42dc-9995-43dadde275da service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] No waiting events found dispatching network-vif-plugged-377707c6-c569-41b4-b460-d4ffd83a8c03 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1291.747839] env[61978]: WARNING nova.compute.manager [req-e4a69e84-47ec-4a20-8769-cb41fe6011c1 req-d5b2fc2c-137a-42dc-9995-43dadde275da service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Received unexpected event network-vif-plugged-377707c6-c569-41b4-b460-d4ffd83a8c03 for instance with vm_state building and task_state spawning. [ 1291.748023] env[61978]: DEBUG nova.compute.manager [req-e4a69e84-47ec-4a20-8769-cb41fe6011c1 req-d5b2fc2c-137a-42dc-9995-43dadde275da service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Received event network-changed-377707c6-c569-41b4-b460-d4ffd83a8c03 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1291.748188] env[61978]: DEBUG nova.compute.manager [req-e4a69e84-47ec-4a20-8769-cb41fe6011c1 req-d5b2fc2c-137a-42dc-9995-43dadde275da service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Refreshing instance network info cache due to event network-changed-377707c6-c569-41b4-b460-d4ffd83a8c03. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1291.748419] env[61978]: DEBUG oslo_concurrency.lockutils [req-e4a69e84-47ec-4a20-8769-cb41fe6011c1 req-d5b2fc2c-137a-42dc-9995-43dadde275da service nova] Acquiring lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1291.782982] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395893, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.925580] env[61978]: INFO nova.compute.claims [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1291.952923] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c6789b9b-6ded-4412-b05d-f5508db5a5b0 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "59f32dd0-1faa-4059-9ef3-b177e8f4fa4c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.937s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.957652] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c5799cfe-fd34-4172-a0e5-f436483f5aae tempest-InstanceActionsNegativeTestJSON-1875676607 tempest-InstanceActionsNegativeTestJSON-1875676607-project-member] Lock "845ec88d-5d2b-479c-a2d1-fa235b2b87b3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.792s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.961842] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a2624e62-0914-4fd4-a7c3-2b4e585edd36 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "d3c82821-0617-4de6-8109-813a67910ed1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.607s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.967512] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1291.967752] env[61978]: DEBUG nova.compute.manager [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Instance network_info: |[{"id": "377707c6-c569-41b4-b460-d4ffd83a8c03", "address": "fa:16:3e:38:98:1d", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap377707c6-c5", "ovs_interfaceid": "377707c6-c569-41b4-b460-d4ffd83a8c03", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1291.968054] env[61978]: DEBUG oslo_concurrency.lockutils [req-e4a69e84-47ec-4a20-8769-cb41fe6011c1 req-d5b2fc2c-137a-42dc-9995-43dadde275da service nova] Acquired lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1291.968256] env[61978]: DEBUG nova.network.neutron [req-e4a69e84-47ec-4a20-8769-cb41fe6011c1 req-d5b2fc2c-137a-42dc-9995-43dadde275da service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Refreshing network info cache for port 377707c6-c569-41b4-b460-d4ffd83a8c03 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1291.969279] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:98:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '377707c6-c569-41b4-b460-d4ffd83a8c03', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1291.981909] env[61978]: DEBUG oslo.service.loopingcall [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1291.985377] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1291.985850] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bb3481a4-8d8a-45a4-9879-961e34d0c934 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.006158] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1292.006158] env[61978]: value = "task-1395894" [ 1292.006158] env[61978]: _type = "Task" [ 1292.006158] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.013983] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395894, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.201016] env[61978]: DEBUG nova.network.neutron [req-e4a69e84-47ec-4a20-8769-cb41fe6011c1 req-d5b2fc2c-137a-42dc-9995-43dadde275da service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Updated VIF entry in instance network info cache for port 377707c6-c569-41b4-b460-d4ffd83a8c03. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1292.201463] env[61978]: DEBUG nova.network.neutron [req-e4a69e84-47ec-4a20-8769-cb41fe6011c1 req-d5b2fc2c-137a-42dc-9995-43dadde275da service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Updating instance_info_cache with network_info: [{"id": "377707c6-c569-41b4-b460-d4ffd83a8c03", "address": "fa:16:3e:38:98:1d", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap377707c6-c5", "ovs_interfaceid": "377707c6-c569-41b4-b460-d4ffd83a8c03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1292.284219] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395893, 'name': ReconfigVM_Task, 'duration_secs': 1.104211} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.284529] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Reconfigured VM instance instance-00000063 to attach disk [datastore2] 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa/764fdf3c-a6ce-4cd6-9190-d2d43fded0fa.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1292.285141] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-74e4d33a-8b5b-41f8-98f4-98e6aa72bd48 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.291306] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for the task: (returnval){ [ 1292.291306] env[61978]: value = "task-1395895" [ 1292.291306] env[61978]: _type = "Task" [ 1292.291306] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.299232] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395895, 'name': Rename_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.433184] env[61978]: INFO nova.compute.resource_tracker [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Updating resource usage from migration 8f4c2454-40e4-4e41-87c6-8f2423d14fa4 [ 1292.519040] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395894, 'name': CreateVM_Task, 'duration_secs': 0.328736} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.521472] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1292.522619] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1292.522955] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1292.523406] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1292.523778] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b4d2776-7885-41b3-a484-c9c935d8c3d7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.529383] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1292.529383] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52fd13e4-35fa-a9bd-3346-5abc95a5ef51" [ 1292.529383] env[61978]: _type = "Task" [ 1292.529383] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.540902] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52fd13e4-35fa-a9bd-3346-5abc95a5ef51, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.567280] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5985ea48-12cb-4d15-9fac-6234a6d85cfc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.573921] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94deb5c6-32ca-4e33-a0ab-33369d61bcfe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.604206] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d0a8a1-f590-438e-8ce6-563868641a80 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.612206] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8818653e-5e21-40b2-8ae8-aff857335b22 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.627613] env[61978]: DEBUG nova.compute.provider_tree [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1292.704038] env[61978]: DEBUG oslo_concurrency.lockutils [req-e4a69e84-47ec-4a20-8769-cb41fe6011c1 req-d5b2fc2c-137a-42dc-9995-43dadde275da service nova] Releasing lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1292.801113] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395895, 'name': Rename_Task, 'duration_secs': 0.144955} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.801404] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1292.801659] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83a60ffc-58e0-40dc-be88-1913ef113f62 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.808168] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for the task: (returnval){ [ 1292.808168] env[61978]: value = "task-1395896" [ 1292.808168] env[61978]: _type = "Task" [ 1292.808168] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.817334] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395896, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.046036] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52fd13e4-35fa-a9bd-3346-5abc95a5ef51, 'name': SearchDatastore_Task, 'duration_secs': 0.011166} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.046036] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1293.046036] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1293.046036] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1293.046036] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.046036] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1293.046036] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58f14638-d4d3-480b-b5fa-5d4dcf52645b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.052703] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1293.052909] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1293.054894] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34e51280-862f-446c-ad04-9301f1475119 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.067380] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1293.069330] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1293.078153] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1293.078153] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52bec778-be7c-3ff0-34b4-bf89f998fde1" [ 1293.078153] env[61978]: _type = "Task" [ 1293.078153] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.092488] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52bec778-be7c-3ff0-34b4-bf89f998fde1, 'name': SearchDatastore_Task, 'duration_secs': 0.011298} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.095106] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38f8038c-1ac5-4a31-a397-84d959bad487 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.103956] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1293.103956] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ba2d77-6c63-6789-4d3d-6beaa4549b61" [ 1293.103956] env[61978]: _type = "Task" [ 1293.103956] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.114296] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ba2d77-6c63-6789-4d3d-6beaa4549b61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.130899] env[61978]: DEBUG nova.scheduler.client.report [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1293.317669] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395896, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.573794] env[61978]: DEBUG nova.compute.manager [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1293.614461] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ba2d77-6c63-6789-4d3d-6beaa4549b61, 'name': SearchDatastore_Task, 'duration_secs': 0.009928} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.614747] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1293.615031] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 7823099f-efdf-46bf-85d7-69e105dfb02c/7823099f-efdf-46bf-85d7-69e105dfb02c.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1293.616036] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b2e0988-450a-4776-9140-7ddd4e37902f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.622544] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1293.622544] env[61978]: value = "task-1395897" [ 1293.622544] env[61978]: _type = "Task" [ 1293.622544] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.631178] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395897, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.636240] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.218s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1293.636441] env[61978]: INFO nova.compute.manager [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Migrating [ 1293.643725] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.778s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1293.644054] env[61978]: INFO nova.compute.claims [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1293.737394] env[61978]: DEBUG oslo_concurrency.lockutils [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "ac1676dd-affa-49cd-9e7b-a301abcec232" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1293.737635] env[61978]: DEBUG oslo_concurrency.lockutils [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "ac1676dd-affa-49cd-9e7b-a301abcec232" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1293.820139] env[61978]: DEBUG oslo_vmware.api [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395896, 'name': PowerOnVM_Task, 'duration_secs': 0.785019} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.820139] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1293.820139] env[61978]: INFO nova.compute.manager [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Took 8.42 seconds to spawn the instance on the hypervisor. 
[ 1293.820139] env[61978]: DEBUG nova.compute.manager [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1293.820625] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a201f3-0d7f-4a15-9ddf-a2df21b75684 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.100358] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1294.134710] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395897, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.156447] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "refresh_cache-c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1294.156725] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired lock "refresh_cache-c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1294.156889] env[61978]: DEBUG nova.network.neutron [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1294.242033] env[61978]: DEBUG nova.compute.manager [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1294.339364] env[61978]: INFO nova.compute.manager [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Took 18.87 seconds to build instance. [ 1294.633759] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395897, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.678676} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.634055] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 7823099f-efdf-46bf-85d7-69e105dfb02c/7823099f-efdf-46bf-85d7-69e105dfb02c.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1294.634335] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1294.634536] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8dd9082-91c7-4ebe-8ca9-40d70beb50c5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.640962] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1294.640962] env[61978]: value = "task-1395898" [ 1294.640962] env[61978]: _type = "Task" [ 1294.640962] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.648304] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395898, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.765244] env[61978]: DEBUG oslo_concurrency.lockutils [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1294.816936] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af11032-d9f4-4fac-a30e-d26316a42486 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.824647] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c5c9d61-52b9-4419-b76a-9ee59d814e8f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.855663] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a45bee2d-ecda-442a-bdec-03a735dc013c tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "764fdf3c-a6ce-4cd6-9190-d2d43fded0fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.411s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1294.859108] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afdcafa8-6eb0-4f84-8ea9-d456295d9311 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.866548] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c18c27c-a36f-404c-aedd-bf036b9efdb0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.880485] env[61978]: DEBUG nova.compute.provider_tree [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1294.985824] env[61978]: DEBUG nova.network.neutron [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Updating instance_info_cache with network_info: [{"id": "f394483a-0b84-4d01-aee1-a50c3a3ee0ff", "address": "fa:16:3e:de:15:87", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf394483a-0b", "ovs_interfaceid": "f394483a-0b84-4d01-aee1-a50c3a3ee0ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1295.151387] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395898, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075986} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.151667] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1295.152444] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b09b0d82-8dc4-42ee-8ac0-9c94001913cb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.174357] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 7823099f-efdf-46bf-85d7-69e105dfb02c/7823099f-efdf-46bf-85d7-69e105dfb02c.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1295.174633] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8e6bc3e-4a9c-4420-a11d-889b675505b2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.197994] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1295.197994] env[61978]: value = "task-1395899" [ 1295.197994] env[61978]: _type = "Task" [ 1295.197994] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.205429] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395899, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.386326] env[61978]: DEBUG nova.scheduler.client.report [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1295.487102] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Releasing lock "refresh_cache-c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1295.690968] env[61978]: DEBUG nova.compute.manager [req-f08eaaf6-09fc-4000-b8df-333be95b0572 req-227aa3c5-c91b-4a0f-bc3c-89965167675c service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Received event network-changed-b790409d-8e9e-4942-9855-0974decac463 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1295.691195] env[61978]: DEBUG nova.compute.manager [req-f08eaaf6-09fc-4000-b8df-333be95b0572 req-227aa3c5-c91b-4a0f-bc3c-89965167675c service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Refreshing instance network info cache due to event network-changed-b790409d-8e9e-4942-9855-0974decac463. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1295.691485] env[61978]: DEBUG oslo_concurrency.lockutils [req-f08eaaf6-09fc-4000-b8df-333be95b0572 req-227aa3c5-c91b-4a0f-bc3c-89965167675c service nova] Acquiring lock "refresh_cache-764fdf3c-a6ce-4cd6-9190-d2d43fded0fa" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1295.691577] env[61978]: DEBUG oslo_concurrency.lockutils [req-f08eaaf6-09fc-4000-b8df-333be95b0572 req-227aa3c5-c91b-4a0f-bc3c-89965167675c service nova] Acquired lock "refresh_cache-764fdf3c-a6ce-4cd6-9190-d2d43fded0fa" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1295.691717] env[61978]: DEBUG nova.network.neutron [req-f08eaaf6-09fc-4000-b8df-333be95b0572 req-227aa3c5-c91b-4a0f-bc3c-89965167675c service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Refreshing network info cache for port b790409d-8e9e-4942-9855-0974decac463 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1295.708880] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395899, 'name': ReconfigVM_Task, 'duration_secs': 0.280074} completed successfully. 
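The ExtendVirtualDisk_Task, ReconfigVM_Task and Rename_Task entries above all follow the same oslo.vmware pattern visible in the wait_for_task/_poll_task frames (api.py:397/434/444): invoke the vSphere method, get back a Task reference, then poll it until it reaches a terminal state, logging progress along the way. A minimal sketch of that loop; the helper names here (wait_for_task, session.get_task_info) are illustrative stand-ins, not the real oslo_vmware internals:

    import time

    POLL_INTERVAL = 0.5  # seconds between TaskInfo reads (illustrative value)

    def wait_for_task(session, task_ref, timeout=300):
        """Poll a vSphere Task until it succeeds, raising on error or timeout."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            # get_task_info is a hypothetical stand-in for reading TaskInfo
            # through the vSphere property collector.
            info = session.get_task_info(task_ref)
            if info.state == 'success':
                return info.result            # e.g. the result of the reconfigure
            if info.state == 'error':
                raise RuntimeError(info.error_message)
            # 'queued' / 'running': report progress and try again, as the log does.
            print(f"Task {info.key} ({info.name}) progress is {info.progress or 0}%")
            time.sleep(POLL_INTERVAL)
        raise TimeoutError(f"Task {task_ref} did not complete within {timeout}s")
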
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.709269] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 7823099f-efdf-46bf-85d7-69e105dfb02c/7823099f-efdf-46bf-85d7-69e105dfb02c.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1295.709897] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9d0459fe-3936-49d0-916e-95a80a3b7441 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.716415] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1295.716415] env[61978]: value = "task-1395900" [ 1295.716415] env[61978]: _type = "Task" [ 1295.716415] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.724795] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395900, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.891458] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.249s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1295.892069] env[61978]: DEBUG nova.compute.manager [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Start building networks asynchronously for instance. 
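The "compute_resources" entries here show oslo.concurrency's lock bookkeeping: every acquire logs how long the caller waited, and every release logs how long the lock was held (2.249s for the instance_claim above, 4.684s of waiting for the update_usage call below). A rough sketch of that wait/held timing, written against a plain threading.Lock rather than the actual lockutils implementation:

    import contextlib
    import logging
    import threading
    import time

    LOG = logging.getLogger(__name__)
    _lock = threading.Lock()

    @contextlib.contextmanager
    def timed_lock(name, owner):
        """Acquire _lock and log wait/held durations in the style seen in this log."""
        start = time.monotonic()
        with _lock:
            waited = time.monotonic() - start
            LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs', name, owner, waited)
            held_start = time.monotonic()
            try:
                yield
            finally:
                held = time.monotonic() - held_start
                LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs', name, owner, held)
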
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1295.894751] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.684s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1295.894962] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1295.896979] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.797s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1295.900813] env[61978]: INFO nova.compute.claims [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1295.927442] env[61978]: INFO nova.scheduler.client.report [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Deleted allocations for instance 4d357d46-8bbb-4228-a5a6-2ce67fe037d7 [ 1296.234778] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395900, 'name': Rename_Task, 'duration_secs': 0.139893} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.236019] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1296.237741] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85dc8c3e-66b1-4647-b9e8-3d311b821b4d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.244268] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1296.244268] env[61978]: value = "task-1395901" [ 1296.244268] env[61978]: _type = "Task" [ 1296.244268] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.254627] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395901, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.409238] env[61978]: DEBUG nova.compute.utils [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1296.414533] env[61978]: DEBUG nova.compute.manager [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1296.414740] env[61978]: DEBUG nova.network.neutron [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1296.439605] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0a2ba54d-912c-4f81-a07f-ea04677a33c2 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "4d357d46-8bbb-4228-a5a6-2ce67fe037d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.138s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1296.493396] env[61978]: DEBUG nova.policy [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3f20b272502341bd80be470f98554d1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d95ebcafdca43b8a1636e21c7258803', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1296.539098] env[61978]: DEBUG nova.network.neutron [req-f08eaaf6-09fc-4000-b8df-333be95b0572 req-227aa3c5-c91b-4a0f-bc3c-89965167675c service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Updated VIF entry in instance network info cache for port b790409d-8e9e-4942-9855-0974decac463. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1296.539513] env[61978]: DEBUG nova.network.neutron [req-f08eaaf6-09fc-4000-b8df-333be95b0572 req-227aa3c5-c91b-4a0f-bc3c-89965167675c service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Updating instance_info_cache with network_info: [{"id": "b790409d-8e9e-4942-9855-0974decac463", "address": "fa:16:3e:5c:ae:b5", "network": {"id": "cf176714-7dd5-4d40-8f6d-7e46444187ca", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1909576930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4975f37c081466ab85cf1c21b750c10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb790409d-8e", "ovs_interfaceid": "b790409d-8e9e-4942-9855-0974decac463", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1296.759257] env[61978]: DEBUG oslo_vmware.api [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395901, 'name': PowerOnVM_Task, 'duration_secs': 0.463666} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.759556] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1296.759797] env[61978]: INFO nova.compute.manager [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Took 6.85 seconds to spawn the instance on the hypervisor. 
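The instance_info_cache updates above carry the full Neutron VIF model as a JSON list: one element per port, each with its MAC address, OVS binding details, and nested subnets/ips (including any floating IPs, as in the 10.180.180.194 mapping earlier). A small sketch of pulling the addresses back out of one entry, assuming the same layout shown in the log; the function name is illustrative:

    def summarize_vif(vif):
        """Return (port_id, mac, fixed_ips, floating_ips) from one network_info entry."""
        fixed, floating = [], []
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                fixed.append(ip["address"])
                floating.extend(fip["address"] for fip in ip.get("floating_ips", []))
        return vif["id"], vif["address"], fixed, floating

    # With the cache entry above for port b790409d-8e9e-4942-9855-0974decac463:
    # summarize_vif(network_info[0]) ->
    #   ('b790409d-...', 'fa:16:3e:5c:ae:b5', ['192.168.128.10'], [])
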
[ 1296.759957] env[61978]: DEBUG nova.compute.manager [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1296.760771] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d9fb9a-83da-4d87-8f30-fdc2c59b3790 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.778149] env[61978]: DEBUG nova.network.neutron [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Successfully created port: 9306ddf1-11e6-4d9d-8cda-d4f9bf78420f {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1296.918619] env[61978]: DEBUG nova.compute.manager [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1297.002826] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f83e1bf2-11e6-44f0-a658-a47bc590ef5a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.028088] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Updating instance 'c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9' progress to 0 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1297.045177] env[61978]: DEBUG oslo_concurrency.lockutils [req-f08eaaf6-09fc-4000-b8df-333be95b0572 req-227aa3c5-c91b-4a0f-bc3c-89965167675c service nova] Releasing lock "refresh_cache-764fdf3c-a6ce-4cd6-9190-d2d43fded0fa" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1297.060413] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317a1778-f4cf-4712-a999-b025ea0094ca {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.068347] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c2fa11-383c-4e75-a1c5-97551d802f65 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.099942] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6884a925-bb17-4a44-8ecc-3adef25b90b1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.108303] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f06ee6-30ad-4043-85e6-cbf0f863facf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.124930] env[61978]: DEBUG 
nova.compute.provider_tree [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1297.279951] env[61978]: INFO nova.compute.manager [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Took 19.56 seconds to build instance. [ 1297.536365] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1297.536693] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b2eb6ee4-5e40-4570-8ec5-271c9e8f946b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.546622] env[61978]: DEBUG oslo_vmware.api [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1297.546622] env[61978]: value = "task-1395902" [ 1297.546622] env[61978]: _type = "Task" [ 1297.546622] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.557403] env[61978]: DEBUG oslo_vmware.api [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395902, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.628798] env[61978]: DEBUG nova.scheduler.client.report [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1297.722697] env[61978]: DEBUG nova.compute.manager [req-df3acf6a-442e-4bc2-bd38-05133ce3f5dc req-8e9628da-8ae9-4b4a-8656-2cc6c1ac97ae service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Received event network-changed-b790409d-8e9e-4942-9855-0974decac463 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1297.722915] env[61978]: DEBUG nova.compute.manager [req-df3acf6a-442e-4bc2-bd38-05133ce3f5dc req-8e9628da-8ae9-4b4a-8656-2cc6c1ac97ae service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Refreshing instance network info cache due to event network-changed-b790409d-8e9e-4942-9855-0974decac463. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1297.723174] env[61978]: DEBUG oslo_concurrency.lockutils [req-df3acf6a-442e-4bc2-bd38-05133ce3f5dc req-8e9628da-8ae9-4b4a-8656-2cc6c1ac97ae service nova] Acquiring lock "refresh_cache-764fdf3c-a6ce-4cd6-9190-d2d43fded0fa" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1297.723326] env[61978]: DEBUG oslo_concurrency.lockutils [req-df3acf6a-442e-4bc2-bd38-05133ce3f5dc req-8e9628da-8ae9-4b4a-8656-2cc6c1ac97ae service nova] Acquired lock "refresh_cache-764fdf3c-a6ce-4cd6-9190-d2d43fded0fa" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1297.723493] env[61978]: DEBUG nova.network.neutron [req-df3acf6a-442e-4bc2-bd38-05133ce3f5dc req-8e9628da-8ae9-4b4a-8656-2cc6c1ac97ae service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Refreshing network info cache for port b790409d-8e9e-4942-9855-0974decac463 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1297.785271] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f03f733c-efcd-4dd3-be32-cf2ff5821520 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "7823099f-efdf-46bf-85d7-69e105dfb02c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.073s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1297.928435] env[61978]: DEBUG nova.compute.manager [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1297.962035] env[61978]: DEBUG nova.virt.hardware [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1297.963041] env[61978]: DEBUG nova.virt.hardware [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1297.963186] env[61978]: DEBUG nova.virt.hardware [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1297.967017] env[61978]: DEBUG nova.virt.hardware [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1297.967017] env[61978]: DEBUG nova.virt.hardware [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1297.967017] env[61978]: DEBUG nova.virt.hardware [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1297.967017] env[61978]: DEBUG nova.virt.hardware [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1297.967017] env[61978]: DEBUG nova.virt.hardware [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1297.967017] env[61978]: DEBUG 
nova.virt.hardware [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1297.967017] env[61978]: DEBUG nova.virt.hardware [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1297.967017] env[61978]: DEBUG nova.virt.hardware [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1297.967017] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7aa281-2254-4452-923a-dfd1b1f19646 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.972024] env[61978]: DEBUG nova.compute.manager [req-fbddeca8-62e7-46b5-86d1-5c1588de9c1c req-095aa575-75b5-45a2-a8b9-a7580e0a20ba service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Received event network-changed-377707c6-c569-41b4-b460-d4ffd83a8c03 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1297.972200] env[61978]: DEBUG nova.compute.manager [req-fbddeca8-62e7-46b5-86d1-5c1588de9c1c req-095aa575-75b5-45a2-a8b9-a7580e0a20ba service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Refreshing instance network info cache due to event network-changed-377707c6-c569-41b4-b460-d4ffd83a8c03. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1297.972424] env[61978]: DEBUG oslo_concurrency.lockutils [req-fbddeca8-62e7-46b5-86d1-5c1588de9c1c req-095aa575-75b5-45a2-a8b9-a7580e0a20ba service nova] Acquiring lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1297.972573] env[61978]: DEBUG oslo_concurrency.lockutils [req-fbddeca8-62e7-46b5-86d1-5c1588de9c1c req-095aa575-75b5-45a2-a8b9-a7580e0a20ba service nova] Acquired lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1297.972874] env[61978]: DEBUG nova.network.neutron [req-fbddeca8-62e7-46b5-86d1-5c1588de9c1c req-095aa575-75b5-45a2-a8b9-a7580e0a20ba service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Refreshing network info cache for port 377707c6-c569-41b4-b460-d4ffd83a8c03 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1297.979501] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426af209-1a49-40ea-bf2c-a76e3d1be94f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.055978] env[61978]: DEBUG oslo_vmware.api [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395902, 'name': PowerOffVM_Task, 'duration_secs': 0.369823} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.056298] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1298.056494] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Updating instance 'c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9' progress to 17 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1298.137021] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.237s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.137021] env[61978]: DEBUG nova.compute.manager [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1298.138241] env[61978]: DEBUG oslo_concurrency.lockutils [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.373s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.140114] env[61978]: INFO nova.compute.claims [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1298.465271] env[61978]: DEBUG nova.network.neutron [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Successfully updated port: 9306ddf1-11e6-4d9d-8cda-d4f9bf78420f {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1298.562788] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:05Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1298.563209] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1298.563472] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1298.563615] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1298.563804] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1298.565865] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1298.565865] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1298.566497] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1298.566789] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1298.567257] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1298.567627] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1298.574806] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-987baddc-a859-47ac-b549-2b7ba9920426 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.596578] env[61978]: DEBUG oslo_vmware.api [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1298.596578] env[61978]: value = "task-1395903" [ 1298.596578] env[61978]: _type = "Task" [ 1298.596578] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.612717] env[61978]: DEBUG oslo_vmware.api [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395903, 'name': ReconfigVM_Task} progress is 10%. 
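The nova.virt.hardware lines above trace the topology selection for these 1-vCPU flavors (m1.nano, m1.micro): with no flavor or image limits (0:0:0) the maxima default to 65536 each, every (sockets, cores, threads) factorization of the vCPU count is enumerated, and for 1 vCPU the only candidate is 1:1:1. A sketch of that enumeration step only, leaving out the preference and sorting logic that hardware.py applies afterwards:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product equals vcpus."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    yield sockets, cores, threads

    # For the 1-vCPU flavors above: list(possible_topologies(1)) == [(1, 1, 1)]
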
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.644955] env[61978]: DEBUG nova.compute.utils [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1298.650351] env[61978]: DEBUG nova.compute.manager [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1298.650495] env[61978]: DEBUG nova.network.neutron [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1298.715774] env[61978]: DEBUG nova.network.neutron [req-df3acf6a-442e-4bc2-bd38-05133ce3f5dc req-8e9628da-8ae9-4b4a-8656-2cc6c1ac97ae service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Updated VIF entry in instance network info cache for port b790409d-8e9e-4942-9855-0974decac463. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1298.716110] env[61978]: DEBUG nova.network.neutron [req-df3acf6a-442e-4bc2-bd38-05133ce3f5dc req-8e9628da-8ae9-4b4a-8656-2cc6c1ac97ae service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Updating instance_info_cache with network_info: [{"id": "b790409d-8e9e-4942-9855-0974decac463", "address": "fa:16:3e:5c:ae:b5", "network": {"id": "cf176714-7dd5-4d40-8f6d-7e46444187ca", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1909576930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4975f37c081466ab85cf1c21b750c10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb790409d-8e", "ovs_interfaceid": "b790409d-8e9e-4942-9855-0974decac463", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1298.759831] env[61978]: DEBUG nova.policy [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7026a28592af41ebb4dd7df6cfa33feb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2af733ffc4384fa1a2c59f4a45f1778c', 'project_domain_id': 'default', 'roles': 
['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1298.953265] env[61978]: DEBUG oslo_concurrency.lockutils [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "b4541d84-b4c3-4441-b5a7-90de2dac3562" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1298.953543] env[61978]: DEBUG oslo_concurrency.lockutils [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "b4541d84-b4c3-4441-b5a7-90de2dac3562" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.974740] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "refresh_cache-48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1298.975079] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "refresh_cache-48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1298.975269] env[61978]: DEBUG nova.network.neutron [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1299.007427] env[61978]: DEBUG nova.network.neutron [req-fbddeca8-62e7-46b5-86d1-5c1588de9c1c req-095aa575-75b5-45a2-a8b9-a7580e0a20ba service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Updated VIF entry in instance network info cache for port 377707c6-c569-41b4-b460-d4ffd83a8c03. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1299.008700] env[61978]: DEBUG nova.network.neutron [req-fbddeca8-62e7-46b5-86d1-5c1588de9c1c req-095aa575-75b5-45a2-a8b9-a7580e0a20ba service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Updating instance_info_cache with network_info: [{"id": "377707c6-c569-41b4-b460-d4ffd83a8c03", "address": "fa:16:3e:38:98:1d", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap377707c6-c5", "ovs_interfaceid": "377707c6-c569-41b4-b460-d4ffd83a8c03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1299.117986] env[61978]: DEBUG oslo_vmware.api [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395903, 'name': ReconfigVM_Task, 'duration_secs': 0.179786} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.118745] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Updating instance 'c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9' progress to 33 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1299.152438] env[61978]: DEBUG nova.compute.manager [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1299.219723] env[61978]: DEBUG oslo_concurrency.lockutils [req-df3acf6a-442e-4bc2-bd38-05133ce3f5dc req-8e9628da-8ae9-4b4a-8656-2cc6c1ac97ae service nova] Releasing lock "refresh_cache-764fdf3c-a6ce-4cd6-9190-d2d43fded0fa" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1299.347811] env[61978]: DEBUG nova.network.neutron [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Successfully created port: 2a16d335-2f9e-47f7-a83c-44777d05ca3b {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1299.363220] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8bf3b6f-6917-4af7-a3a3-ae0217ca816b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.372911] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac6ae13f-cda8-4989-846d-02ade1291543 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.407310] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca3c2be-a654-46fc-85dc-1b0c46b0382f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.415511] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73ee16f-17a6-4c3d-82bb-1af1526c6f09 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.431181] env[61978]: DEBUG nova.compute.provider_tree [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1299.459751] env[61978]: DEBUG nova.compute.manager [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1299.512396] env[61978]: DEBUG oslo_concurrency.lockutils [req-fbddeca8-62e7-46b5-86d1-5c1588de9c1c req-095aa575-75b5-45a2-a8b9-a7580e0a20ba service nova] Releasing lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1299.538758] env[61978]: DEBUG nova.network.neutron [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1299.628477] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1299.628477] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1299.628933] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1299.628933] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1299.628933] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1299.629258] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1299.629424] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1299.632342] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1299.632342] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Got 1 possible topologies {{(pid=61978) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1299.632342] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1299.632342] env[61978]: DEBUG nova.virt.hardware [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1299.637143] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Reconfiguring VM instance instance-00000062 to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1299.637474] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4daba502-35fd-4651-ba66-4ae24dc40572 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.668225] env[61978]: DEBUG oslo_vmware.api [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1299.668225] env[61978]: value = "task-1395904" [ 1299.668225] env[61978]: _type = "Task" [ 1299.668225] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.675039] env[61978]: DEBUG oslo_vmware.api [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395904, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.783520] env[61978]: DEBUG nova.network.neutron [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Updating instance_info_cache with network_info: [{"id": "9306ddf1-11e6-4d9d-8cda-d4f9bf78420f", "address": "fa:16:3e:35:51:b2", "network": {"id": "69a878ea-ec7d-4793-9c48-f0f5d454a6fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1711420523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d95ebcafdca43b8a1636e21c7258803", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9306ddf1-11", "ovs_interfaceid": "9306ddf1-11e6-4d9d-8cda-d4f9bf78420f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1299.934869] env[61978]: DEBUG nova.scheduler.client.report [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1299.984363] env[61978]: DEBUG oslo_concurrency.lockutils [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.994707] env[61978]: DEBUG nova.compute.manager [req-d2221a08-720d-4fca-8bf1-be1c547719a5 req-8f163746-fa8d-4c82-881b-0c60ce45675d service nova] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Received event network-vif-plugged-9306ddf1-11e6-4d9d-8cda-d4f9bf78420f {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1299.994949] env[61978]: DEBUG oslo_concurrency.lockutils [req-d2221a08-720d-4fca-8bf1-be1c547719a5 req-8f163746-fa8d-4c82-881b-0c60ce45675d service nova] Acquiring lock "48d05337-7018-4dc2-a6a4-dd80ad3c4eb1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.995221] env[61978]: DEBUG oslo_concurrency.lockutils [req-d2221a08-720d-4fca-8bf1-be1c547719a5 req-8f163746-fa8d-4c82-881b-0c60ce45675d service nova] Lock "48d05337-7018-4dc2-a6a4-dd80ad3c4eb1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1299.995405] env[61978]: DEBUG oslo_concurrency.lockutils [req-d2221a08-720d-4fca-8bf1-be1c547719a5 req-8f163746-fa8d-4c82-881b-0c60ce45675d service nova] Lock "48d05337-7018-4dc2-a6a4-dd80ad3c4eb1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.995689] env[61978]: DEBUG nova.compute.manager [req-d2221a08-720d-4fca-8bf1-be1c547719a5 req-8f163746-fa8d-4c82-881b-0c60ce45675d service nova] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] No waiting events found dispatching network-vif-plugged-9306ddf1-11e6-4d9d-8cda-d4f9bf78420f {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1299.995917] env[61978]: WARNING nova.compute.manager [req-d2221a08-720d-4fca-8bf1-be1c547719a5 req-8f163746-fa8d-4c82-881b-0c60ce45675d service nova] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Received unexpected event network-vif-plugged-9306ddf1-11e6-4d9d-8cda-d4f9bf78420f for instance with vm_state building and task_state spawning. [ 1299.996130] env[61978]: DEBUG nova.compute.manager [req-d2221a08-720d-4fca-8bf1-be1c547719a5 req-8f163746-fa8d-4c82-881b-0c60ce45675d service nova] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Received event network-changed-9306ddf1-11e6-4d9d-8cda-d4f9bf78420f {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1299.996315] env[61978]: DEBUG nova.compute.manager [req-d2221a08-720d-4fca-8bf1-be1c547719a5 req-8f163746-fa8d-4c82-881b-0c60ce45675d service nova] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Refreshing instance network info cache due to event network-changed-9306ddf1-11e6-4d9d-8cda-d4f9bf78420f. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1299.996543] env[61978]: DEBUG oslo_concurrency.lockutils [req-d2221a08-720d-4fca-8bf1-be1c547719a5 req-8f163746-fa8d-4c82-881b-0c60ce45675d service nova] Acquiring lock "refresh_cache-48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1300.163696] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquiring lock "a5b3f628-edc6-4d30-a179-ffc755f940f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1300.163952] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "a5b3f628-edc6-4d30-a179-ffc755f940f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1300.167832] env[61978]: DEBUG nova.compute.manager [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1300.179286] env[61978]: DEBUG oslo_vmware.api [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395904, 'name': ReconfigVM_Task, 'duration_secs': 0.207994} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.179567] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Reconfigured VM instance instance-00000062 to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1300.180381] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2836e212-4860-4d92-8688-7b26b067964a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.202864] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] volume-73762ddd-195c-421a-95df-d5230c3e7c5e/volume-73762ddd-195c-421a-95df-d5230c3e7c5e.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1300.205097] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19102334-72f1-43dc-a5f7-c63d4213e916 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.223361] env[61978]: DEBUG oslo_vmware.api [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1300.223361] env[61978]: value = "task-1395908" [ 1300.223361] env[61978]: _type = "Task" [ 1300.223361] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.225578] env[61978]: DEBUG nova.virt.hardware [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1300.225881] env[61978]: DEBUG nova.virt.hardware [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1300.226075] env[61978]: DEBUG nova.virt.hardware [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1300.226277] env[61978]: DEBUG nova.virt.hardware [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1300.226432] env[61978]: DEBUG nova.virt.hardware [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1300.226584] env[61978]: DEBUG nova.virt.hardware [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1300.226814] env[61978]: DEBUG nova.virt.hardware [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1300.227011] env[61978]: DEBUG nova.virt.hardware [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1300.227239] env[61978]: 
DEBUG nova.virt.hardware [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1300.227438] env[61978]: DEBUG nova.virt.hardware [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1300.227623] env[61978]: DEBUG nova.virt.hardware [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1300.228859] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff03a469-ff0e-48be-afe8-0fb7900f8fe1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.242081] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1610c3ce-99f2-4c42-8bad-c6f8851fcfe7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.245782] env[61978]: DEBUG oslo_vmware.api [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395908, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.286603] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "refresh_cache-48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1300.287037] env[61978]: DEBUG nova.compute.manager [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Instance network_info: |[{"id": "9306ddf1-11e6-4d9d-8cda-d4f9bf78420f", "address": "fa:16:3e:35:51:b2", "network": {"id": "69a878ea-ec7d-4793-9c48-f0f5d454a6fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1711420523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d95ebcafdca43b8a1636e21c7258803", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9306ddf1-11", "ovs_interfaceid": "9306ddf1-11e6-4d9d-8cda-d4f9bf78420f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1300.287373] env[61978]: DEBUG oslo_concurrency.lockutils [req-d2221a08-720d-4fca-8bf1-be1c547719a5 req-8f163746-fa8d-4c82-881b-0c60ce45675d service nova] Acquired lock "refresh_cache-48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.287566] env[61978]: DEBUG nova.network.neutron [req-d2221a08-720d-4fca-8bf1-be1c547719a5 req-8f163746-fa8d-4c82-881b-0c60ce45675d service nova] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Refreshing network info cache for port 9306ddf1-11e6-4d9d-8cda-d4f9bf78420f {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1300.288879] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:51:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6def6dc5-d564-45ca-8f4f-7c820677e6e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9306ddf1-11e6-4d9d-8cda-d4f9bf78420f', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1300.296179] env[61978]: DEBUG oslo.service.loopingcall [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 
tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1300.299198] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1300.299660] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-613c28fb-37e1-4062-98d8-b270dc62df46 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.320098] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1300.320098] env[61978]: value = "task-1395909" [ 1300.320098] env[61978]: _type = "Task" [ 1300.320098] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.327773] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395909, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.442843] env[61978]: DEBUG oslo_concurrency.lockutils [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.305s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1300.443383] env[61978]: DEBUG nova.compute.manager [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1300.446208] env[61978]: DEBUG oslo_concurrency.lockutils [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.462s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1300.447735] env[61978]: INFO nova.compute.claims [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1300.554240] env[61978]: DEBUG nova.network.neutron [req-d2221a08-720d-4fca-8bf1-be1c547719a5 req-8f163746-fa8d-4c82-881b-0c60ce45675d service nova] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Updated VIF entry in instance network info cache for port 9306ddf1-11e6-4d9d-8cda-d4f9bf78420f. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1300.554636] env[61978]: DEBUG nova.network.neutron [req-d2221a08-720d-4fca-8bf1-be1c547719a5 req-8f163746-fa8d-4c82-881b-0c60ce45675d service nova] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Updating instance_info_cache with network_info: [{"id": "9306ddf1-11e6-4d9d-8cda-d4f9bf78420f", "address": "fa:16:3e:35:51:b2", "network": {"id": "69a878ea-ec7d-4793-9c48-f0f5d454a6fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1711420523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d95ebcafdca43b8a1636e21c7258803", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9306ddf1-11", "ovs_interfaceid": "9306ddf1-11e6-4d9d-8cda-d4f9bf78420f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1300.667135] env[61978]: DEBUG nova.compute.manager [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1300.736694] env[61978]: DEBUG oslo_vmware.api [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395908, 'name': ReconfigVM_Task, 'duration_secs': 0.501692} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.737031] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Reconfigured VM instance instance-00000062 to attach disk [datastore2] volume-73762ddd-195c-421a-95df-d5230c3e7c5e/volume-73762ddd-195c-421a-95df-d5230c3e7c5e.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1300.737327] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Updating instance 'c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9' progress to 50 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1300.832854] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395909, 'name': CreateVM_Task, 'duration_secs': 0.460044} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.833084] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1300.833968] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1300.834228] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.834672] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1300.835008] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67042d4e-19da-4403-a4fe-6df4662beec5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.840322] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1300.840322] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52111751-c804-30ed-2252-afda23e58919" [ 1300.840322] env[61978]: _type = "Task" [ 1300.840322] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.850586] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52111751-c804-30ed-2252-afda23e58919, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.953721] env[61978]: DEBUG nova.compute.utils [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1300.957303] env[61978]: DEBUG nova.compute.manager [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1300.957513] env[61978]: DEBUG nova.network.neutron [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1301.013389] env[61978]: DEBUG nova.policy [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a208cee3d9c4efb8240ad943b55e915', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86ad52b551104a2594f1dbbc287f9efa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1301.059021] env[61978]: DEBUG oslo_concurrency.lockutils [req-d2221a08-720d-4fca-8bf1-be1c547719a5 req-8f163746-fa8d-4c82-881b-0c60ce45675d service nova] Releasing lock "refresh_cache-48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1301.102014] env[61978]: DEBUG nova.network.neutron [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Successfully updated port: 2a16d335-2f9e-47f7-a83c-44777d05ca3b {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1301.204431] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1301.244323] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc31c0f5-4f54-4b60-b2b0-4d6023025316 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.265086] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361450a6-7a24-4eb2-8127-69b3d6284a78 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.285570] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Updating instance 'c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9' progress to 67 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1301.351713] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': 
session[52499f00-04f1-7b6f-00fd-7545db7737b2]52111751-c804-30ed-2252-afda23e58919, 'name': SearchDatastore_Task, 'duration_secs': 0.038295} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.352047] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1301.352290] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1301.352548] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1301.352705] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1301.352893] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1301.353432] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2adacb24-d261-4525-a581-aedaf9339514 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.359992] env[61978]: DEBUG nova.network.neutron [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Successfully created port: f8cf63ba-ee62-4a3a-85e0-87d88ff84665 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1301.366919] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1301.367165] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1301.367985] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49e1ddae-75aa-478b-9d25-a31515cb17e5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.374312] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1301.374312] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c7ee20-e83e-468f-173e-c474da16d2b4" [ 1301.374312] env[61978]: _type = "Task" [ 1301.374312] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.382527] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c7ee20-e83e-468f-173e-c474da16d2b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.459056] env[61978]: DEBUG nova.compute.manager [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1301.605242] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "refresh_cache-fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1301.605432] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired lock "refresh_cache-fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1301.605592] env[61978]: DEBUG nova.network.neutron [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1301.633073] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74bb9661-3558-4e2d-b3a9-594c283ff611 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.640818] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da03e118-8075-4dae-b062-9c65fdf62b0c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.670532] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cac8f6e8-6e96-47e2-ab34-319d431d08d3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.677805] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea43836-c56c-4e70-ae1a-2e9c0ca59d5c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.694221] env[61978]: DEBUG nova.compute.provider_tree [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1301.885780] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c7ee20-e83e-468f-173e-c474da16d2b4, 'name': SearchDatastore_Task, 'duration_secs': 0.043717} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.886678] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5acc106-0829-4224-ab07-977ef9b34a09 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.892237] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1301.892237] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b0a0c5-36f0-ba7c-98ed-bd6bbb278861" [ 1301.892237] env[61978]: _type = "Task" [ 1301.892237] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.900964] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b0a0c5-36f0-ba7c-98ed-bd6bbb278861, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.022142] env[61978]: DEBUG nova.compute.manager [req-f57963ad-1cae-4f2e-9af7-1b01a1332f09 req-bc639e71-9d9e-4b0c-a624-ba7a0e3e00e9 service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Received event network-vif-plugged-2a16d335-2f9e-47f7-a83c-44777d05ca3b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1302.022289] env[61978]: DEBUG oslo_concurrency.lockutils [req-f57963ad-1cae-4f2e-9af7-1b01a1332f09 req-bc639e71-9d9e-4b0c-a624-ba7a0e3e00e9 service nova] Acquiring lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.022539] env[61978]: DEBUG oslo_concurrency.lockutils [req-f57963ad-1cae-4f2e-9af7-1b01a1332f09 req-bc639e71-9d9e-4b0c-a624-ba7a0e3e00e9 service nova] Lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1302.022659] env[61978]: DEBUG oslo_concurrency.lockutils [req-f57963ad-1cae-4f2e-9af7-1b01a1332f09 req-bc639e71-9d9e-4b0c-a624-ba7a0e3e00e9 service nova] Lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1302.022838] env[61978]: DEBUG nova.compute.manager [req-f57963ad-1cae-4f2e-9af7-1b01a1332f09 req-bc639e71-9d9e-4b0c-a624-ba7a0e3e00e9 service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] No waiting events found dispatching network-vif-plugged-2a16d335-2f9e-47f7-a83c-44777d05ca3b {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1302.023026] env[61978]: WARNING nova.compute.manager [req-f57963ad-1cae-4f2e-9af7-1b01a1332f09 req-bc639e71-9d9e-4b0c-a624-ba7a0e3e00e9 service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Received unexpected event network-vif-plugged-2a16d335-2f9e-47f7-a83c-44777d05ca3b for instance with vm_state building and task_state spawning. [ 1302.023257] env[61978]: DEBUG nova.compute.manager [req-f57963ad-1cae-4f2e-9af7-1b01a1332f09 req-bc639e71-9d9e-4b0c-a624-ba7a0e3e00e9 service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Received event network-changed-2a16d335-2f9e-47f7-a83c-44777d05ca3b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1302.023425] env[61978]: DEBUG nova.compute.manager [req-f57963ad-1cae-4f2e-9af7-1b01a1332f09 req-bc639e71-9d9e-4b0c-a624-ba7a0e3e00e9 service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Refreshing instance network info cache due to event network-changed-2a16d335-2f9e-47f7-a83c-44777d05ca3b. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1302.023599] env[61978]: DEBUG oslo_concurrency.lockutils [req-f57963ad-1cae-4f2e-9af7-1b01a1332f09 req-bc639e71-9d9e-4b0c-a624-ba7a0e3e00e9 service nova] Acquiring lock "refresh_cache-fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1302.138766] env[61978]: DEBUG nova.network.neutron [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1302.197928] env[61978]: DEBUG nova.scheduler.client.report [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1302.285469] env[61978]: DEBUG nova.network.neutron [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Updating instance_info_cache with network_info: [{"id": "2a16d335-2f9e-47f7-a83c-44777d05ca3b", "address": "fa:16:3e:e2:18:65", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a16d335-2f", "ovs_interfaceid": "2a16d335-2f9e-47f7-a83c-44777d05ca3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1302.403769] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b0a0c5-36f0-ba7c-98ed-bd6bbb278861, 'name': SearchDatastore_Task, 'duration_secs': 0.009315} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.404051] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1302.404323] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1/48d05337-7018-4dc2-a6a4-dd80ad3c4eb1.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1302.404587] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-273164f3-429d-43d1-bc46-42a0cf1b4d02 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.411731] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1302.411731] env[61978]: value = "task-1395910" [ 1302.411731] env[61978]: _type = "Task" [ 1302.411731] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.419580] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395910, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.473315] env[61978]: DEBUG nova.compute.manager [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1302.497848] env[61978]: DEBUG nova.virt.hardware [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1302.498162] env[61978]: DEBUG nova.virt.hardware [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1302.498356] env[61978]: DEBUG nova.virt.hardware [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1302.498578] env[61978]: DEBUG nova.virt.hardware [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1302.498764] env[61978]: DEBUG nova.virt.hardware [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1302.498947] env[61978]: DEBUG nova.virt.hardware [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1302.499211] env[61978]: DEBUG nova.virt.hardware [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1302.499397] env[61978]: DEBUG nova.virt.hardware [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1302.499603] env[61978]: DEBUG nova.virt.hardware [None 
req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1302.499809] env[61978]: DEBUG nova.virt.hardware [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1302.499995] env[61978]: DEBUG nova.virt.hardware [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1302.501181] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f721062b-e142-4d84-b9ae-bb30582a52cd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.509553] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e5324e-35bb-4e28-8830-5666c1820545 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.703139] env[61978]: DEBUG oslo_concurrency.lockutils [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.257s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1302.703790] env[61978]: DEBUG nova.compute.manager [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1302.708785] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.504s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1302.710688] env[61978]: INFO nova.compute.claims [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1302.788334] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lock "refresh_cache-fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1302.788812] env[61978]: DEBUG nova.compute.manager [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Instance network_info: |[{"id": "2a16d335-2f9e-47f7-a83c-44777d05ca3b", "address": "fa:16:3e:e2:18:65", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a16d335-2f", "ovs_interfaceid": "2a16d335-2f9e-47f7-a83c-44777d05ca3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1302.789472] env[61978]: DEBUG oslo_concurrency.lockutils [req-f57963ad-1cae-4f2e-9af7-1b01a1332f09 req-bc639e71-9d9e-4b0c-a624-ba7a0e3e00e9 service nova] Acquired lock "refresh_cache-fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1302.789931] env[61978]: DEBUG nova.network.neutron [req-f57963ad-1cae-4f2e-9af7-1b01a1332f09 req-bc639e71-9d9e-4b0c-a624-ba7a0e3e00e9 service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Refreshing network info cache for port 2a16d335-2f9e-47f7-a83c-44777d05ca3b {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1302.791324] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec 
tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:18:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac7039c0-3374-4c08-87fc-af2449b48b02', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2a16d335-2f9e-47f7-a83c-44777d05ca3b', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1302.799500] env[61978]: DEBUG oslo.service.loopingcall [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1302.800836] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1302.801195] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-af4a93f7-6d39-45e2-bf58-6773c273b746 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.820892] env[61978]: DEBUG nova.compute.manager [req-7ce1f7ee-6956-401a-8b15-4ab1ef64fd5e req-6f53946e-afa0-4296-bb55-0bae395b2d99 service nova] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Received event network-vif-plugged-f8cf63ba-ee62-4a3a-85e0-87d88ff84665 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1302.821348] env[61978]: DEBUG oslo_concurrency.lockutils [req-7ce1f7ee-6956-401a-8b15-4ab1ef64fd5e req-6f53946e-afa0-4296-bb55-0bae395b2d99 service nova] Acquiring lock "ac1676dd-affa-49cd-9e7b-a301abcec232-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.821643] env[61978]: DEBUG oslo_concurrency.lockutils [req-7ce1f7ee-6956-401a-8b15-4ab1ef64fd5e req-6f53946e-afa0-4296-bb55-0bae395b2d99 service nova] Lock "ac1676dd-affa-49cd-9e7b-a301abcec232-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1302.821865] env[61978]: DEBUG oslo_concurrency.lockutils [req-7ce1f7ee-6956-401a-8b15-4ab1ef64fd5e req-6f53946e-afa0-4296-bb55-0bae395b2d99 service nova] Lock "ac1676dd-affa-49cd-9e7b-a301abcec232-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1302.822129] env[61978]: DEBUG nova.compute.manager [req-7ce1f7ee-6956-401a-8b15-4ab1ef64fd5e req-6f53946e-afa0-4296-bb55-0bae395b2d99 service nova] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] No waiting events found dispatching network-vif-plugged-f8cf63ba-ee62-4a3a-85e0-87d88ff84665 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1302.822351] env[61978]: WARNING nova.compute.manager [req-7ce1f7ee-6956-401a-8b15-4ab1ef64fd5e req-6f53946e-afa0-4296-bb55-0bae395b2d99 service nova] [instance: 
ac1676dd-affa-49cd-9e7b-a301abcec232] Received unexpected event network-vif-plugged-f8cf63ba-ee62-4a3a-85e0-87d88ff84665 for instance with vm_state building and task_state spawning. [ 1302.833177] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1302.833177] env[61978]: value = "task-1395911" [ 1302.833177] env[61978]: _type = "Task" [ 1302.833177] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.849388] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395911, 'name': CreateVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.925360] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395910, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47284} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.925674] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1/48d05337-7018-4dc2-a6a4-dd80ad3c4eb1.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1302.926432] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1302.926587] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8fab3d9-af53-4c1e-bc48-373cd8e93d18 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.932409] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1302.932409] env[61978]: value = "task-1395912" [ 1302.932409] env[61978]: _type = "Task" [ 1302.932409] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.941549] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395912, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.963265] env[61978]: DEBUG nova.network.neutron [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Port f394483a-0b84-4d01-aee1-a50c3a3ee0ff binding to destination host cpu-1 is already ACTIVE {{(pid=61978) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1303.216226] env[61978]: DEBUG nova.compute.utils [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1303.217646] env[61978]: DEBUG nova.compute.manager [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1303.217826] env[61978]: DEBUG nova.network.neutron [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1303.263934] env[61978]: DEBUG nova.policy [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '346f982562de48fab1702aca567113ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3a4f29a959447159b2f7d194ea94873', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1303.343057] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395911, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.424103] env[61978]: DEBUG nova.network.neutron [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Successfully updated port: f8cf63ba-ee62-4a3a-85e0-87d88ff84665 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1303.446092] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395912, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069984} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.447462] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1303.448753] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a74ebeb-5842-4326-999e-228cea3ead0f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.487862] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1/48d05337-7018-4dc2-a6a4-dd80ad3c4eb1.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1303.488766] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01fed509-8eef-44e3-917c-d2d2ecde1fd5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.513274] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1303.513274] env[61978]: value = "task-1395914" [ 1303.513274] env[61978]: _type = "Task" [ 1303.513274] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.524021] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395914, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.722617] env[61978]: DEBUG nova.compute.manager [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1303.845007] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395911, 'name': CreateVM_Task, 'duration_secs': 0.634268} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.845205] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1303.845871] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1303.846060] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.850820] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1303.851398] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee3040f8-0e61-451b-b61e-8f5afcd22151 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.856548] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1303.856548] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d39009-e2d2-c29d-57ac-a1b1b9163023" [ 1303.856548] env[61978]: _type = "Task" [ 1303.856548] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.868306] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d39009-e2d2-c29d-57ac-a1b1b9163023, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.870487] env[61978]: DEBUG nova.network.neutron [req-f57963ad-1cae-4f2e-9af7-1b01a1332f09 req-bc639e71-9d9e-4b0c-a624-ba7a0e3e00e9 service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Updated VIF entry in instance network info cache for port 2a16d335-2f9e-47f7-a83c-44777d05ca3b. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1303.870800] env[61978]: DEBUG nova.network.neutron [req-f57963ad-1cae-4f2e-9af7-1b01a1332f09 req-bc639e71-9d9e-4b0c-a624-ba7a0e3e00e9 service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Updating instance_info_cache with network_info: [{"id": "2a16d335-2f9e-47f7-a83c-44777d05ca3b", "address": "fa:16:3e:e2:18:65", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a16d335-2f", "ovs_interfaceid": "2a16d335-2f9e-47f7-a83c-44777d05ca3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1303.912817] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b7f99fd-d903-4d32-9829-c256d533ab85 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.921199] env[61978]: DEBUG nova.network.neutron [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Successfully created port: 7cefaef7-7dfd-4081-8872-bbdb8d201973 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1303.923672] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fbb56b7-3f5d-460b-bf80-48e28bcfb000 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.926951] env[61978]: DEBUG oslo_concurrency.lockutils [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "refresh_cache-ac1676dd-affa-49cd-9e7b-a301abcec232" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1303.927109] env[61978]: DEBUG oslo_concurrency.lockutils [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "refresh_cache-ac1676dd-affa-49cd-9e7b-a301abcec232" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.927256] env[61978]: DEBUG nova.network.neutron [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] 
Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1303.956398] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd57a0be-6f23-4245-af76-5a9f0c7e11fb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.964273] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-719238f2-a7ce-4791-9e57-b997da8577cc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.977795] env[61978]: DEBUG nova.compute.provider_tree [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1304.004191] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1304.004358] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1304.005373] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1304.025512] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395914, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.047919] env[61978]: DEBUG nova.compute.manager [req-8e74b0b3-ed1b-45ca-8b24-e8b6eabd656b req-7250097b-589d-4d45-9c67-60fbae0f3a1c service nova] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Received event network-changed-f8cf63ba-ee62-4a3a-85e0-87d88ff84665 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1304.048201] env[61978]: DEBUG nova.compute.manager [req-8e74b0b3-ed1b-45ca-8b24-e8b6eabd656b req-7250097b-589d-4d45-9c67-60fbae0f3a1c service nova] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Refreshing instance network info cache due to event network-changed-f8cf63ba-ee62-4a3a-85e0-87d88ff84665. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1304.048414] env[61978]: DEBUG oslo_concurrency.lockutils [req-8e74b0b3-ed1b-45ca-8b24-e8b6eabd656b req-7250097b-589d-4d45-9c67-60fbae0f3a1c service nova] Acquiring lock "refresh_cache-ac1676dd-affa-49cd-9e7b-a301abcec232" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1304.366895] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d39009-e2d2-c29d-57ac-a1b1b9163023, 'name': SearchDatastore_Task, 'duration_secs': 0.01432} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.367264] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1304.367508] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1304.367751] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1304.367922] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.368226] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1304.368494] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-effa5464-6a0b-45a8-b41e-34201db30b23 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.375072] env[61978]: DEBUG oslo_concurrency.lockutils [req-f57963ad-1cae-4f2e-9af7-1b01a1332f09 req-bc639e71-9d9e-4b0c-a624-ba7a0e3e00e9 service nova] Releasing lock "refresh_cache-fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1304.376824] env[61978]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1304.377035] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1304.377776] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f3d88b6-db7d-4a6b-ac7f-2ae4c7f17527 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.382533] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1304.382533] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a193c8-6965-7db6-c960-32d13632bc9a" [ 1304.382533] env[61978]: _type = "Task" [ 1304.382533] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.390089] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a193c8-6965-7db6-c960-32d13632bc9a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.459209] env[61978]: DEBUG nova.network.neutron [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1304.480950] env[61978]: DEBUG nova.scheduler.client.report [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1304.525080] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395914, 'name': ReconfigVM_Task, 'duration_secs': 0.515923} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.525383] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Reconfigured VM instance instance-00000065 to attach disk [datastore2] 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1/48d05337-7018-4dc2-a6a4-dd80ad3c4eb1.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1304.526017] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2367506-c4f2-42ba-967f-a2ea6b022067 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.534230] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1304.534230] env[61978]: value = "task-1395915" [ 1304.534230] env[61978]: _type = "Task" [ 1304.534230] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.542595] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395915, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.606142] env[61978]: DEBUG nova.network.neutron [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Updating instance_info_cache with network_info: [{"id": "f8cf63ba-ee62-4a3a-85e0-87d88ff84665", "address": "fa:16:3e:dd:50:05", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8cf63ba-ee", "ovs_interfaceid": "f8cf63ba-ee62-4a3a-85e0-87d88ff84665", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.735045] env[61978]: DEBUG nova.compute.manager [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Start 
spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1304.761849] env[61978]: DEBUG nova.virt.hardware [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1304.762256] env[61978]: DEBUG nova.virt.hardware [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1304.762474] env[61978]: DEBUG nova.virt.hardware [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1304.762683] env[61978]: DEBUG nova.virt.hardware [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1304.762842] env[61978]: DEBUG nova.virt.hardware [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1304.763009] env[61978]: DEBUG nova.virt.hardware [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1304.763249] env[61978]: DEBUG nova.virt.hardware [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1304.763424] env[61978]: DEBUG nova.virt.hardware [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1304.763669] env[61978]: DEBUG nova.virt.hardware [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1304.763851] env[61978]: DEBUG nova.virt.hardware [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1304.764044] env[61978]: DEBUG nova.virt.hardware [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1304.764938] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc89b9b-6fd9-41d2-9c0d-ae16f0fd109a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.773223] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059bb801-28f2-4b41-95e5-656a226de853 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.892658] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a193c8-6965-7db6-c960-32d13632bc9a, 'name': SearchDatastore_Task, 'duration_secs': 0.017312} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.893476] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f77e3190-ef7e-448b-8775-e3bc5db15d86 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.899480] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1304.899480] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52fb98f3-150d-7dc5-a75c-e304777879bf" [ 1304.899480] env[61978]: _type = "Task" [ 1304.899480] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.910111] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52fb98f3-150d-7dc5-a75c-e304777879bf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.986406] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.278s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1304.986953] env[61978]: DEBUG nova.compute.manager [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1305.036524] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "refresh_cache-c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1305.036743] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired lock "refresh_cache-c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.036995] env[61978]: DEBUG nova.network.neutron [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1305.047346] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395915, 'name': Rename_Task, 'duration_secs': 0.44201} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.048166] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1305.048432] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa286d13-33b9-4689-a829-3c22ce7a7730 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.054564] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1305.054564] env[61978]: value = "task-1395917" [ 1305.054564] env[61978]: _type = "Task" [ 1305.054564] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.064038] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395917, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.109258] env[61978]: DEBUG oslo_concurrency.lockutils [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "refresh_cache-ac1676dd-affa-49cd-9e7b-a301abcec232" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1305.109676] env[61978]: DEBUG nova.compute.manager [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Instance network_info: |[{"id": "f8cf63ba-ee62-4a3a-85e0-87d88ff84665", "address": "fa:16:3e:dd:50:05", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8cf63ba-ee", "ovs_interfaceid": "f8cf63ba-ee62-4a3a-85e0-87d88ff84665", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1305.110055] env[61978]: DEBUG oslo_concurrency.lockutils [req-8e74b0b3-ed1b-45ca-8b24-e8b6eabd656b req-7250097b-589d-4d45-9c67-60fbae0f3a1c service nova] Acquired lock "refresh_cache-ac1676dd-affa-49cd-9e7b-a301abcec232" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.110275] env[61978]: DEBUG nova.network.neutron [req-8e74b0b3-ed1b-45ca-8b24-e8b6eabd656b req-7250097b-589d-4d45-9c67-60fbae0f3a1c service nova] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Refreshing network info cache for port f8cf63ba-ee62-4a3a-85e0-87d88ff84665 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1305.111540] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:50:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': 
True}, 'iface_id': 'f8cf63ba-ee62-4a3a-85e0-87d88ff84665', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1305.119168] env[61978]: DEBUG oslo.service.loopingcall [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1305.122315] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1305.122841] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2c85395f-ae85-4bb3-b52a-4b69923780ac {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.143037] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1305.143037] env[61978]: value = "task-1395918" [ 1305.143037] env[61978]: _type = "Task" [ 1305.143037] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.152134] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395918, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.410959] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52fb98f3-150d-7dc5-a75c-e304777879bf, 'name': SearchDatastore_Task, 'duration_secs': 0.02848} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.411118] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1305.411299] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd/fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1305.411574] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2aefb05d-1f9d-4b72-acf1-4272a71f06c4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.418458] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1305.418458] env[61978]: value = "task-1395919" [ 1305.418458] env[61978]: _type = "Task" [ 1305.418458] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.426739] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395919, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.463039] env[61978]: DEBUG nova.network.neutron [req-8e74b0b3-ed1b-45ca-8b24-e8b6eabd656b req-7250097b-589d-4d45-9c67-60fbae0f3a1c service nova] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Updated VIF entry in instance network info cache for port f8cf63ba-ee62-4a3a-85e0-87d88ff84665. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1305.463643] env[61978]: DEBUG nova.network.neutron [req-8e74b0b3-ed1b-45ca-8b24-e8b6eabd656b req-7250097b-589d-4d45-9c67-60fbae0f3a1c service nova] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Updating instance_info_cache with network_info: [{"id": "f8cf63ba-ee62-4a3a-85e0-87d88ff84665", "address": "fa:16:3e:dd:50:05", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8cf63ba-ee", "ovs_interfaceid": "f8cf63ba-ee62-4a3a-85e0-87d88ff84665", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1305.492687] env[61978]: DEBUG nova.compute.utils [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1305.494701] env[61978]: DEBUG nova.compute.manager [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1305.494882] env[61978]: DEBUG nova.network.neutron [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1305.566485] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395917, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.586716] env[61978]: DEBUG nova.policy [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '83e49dfbe8d44d23a25c9dba3d2eeb50', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f4975f37c081466ab85cf1c21b750c10', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1305.652444] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395918, 'name': CreateVM_Task, 'duration_secs': 0.493361} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.652648] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1305.653340] env[61978]: DEBUG oslo_concurrency.lockutils [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1305.653518] env[61978]: DEBUG oslo_concurrency.lockutils [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.653851] env[61978]: DEBUG oslo_concurrency.lockutils [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1305.654136] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60504d97-2be7-45f1-aee5-1c366b367bb1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.659030] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1305.659030] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]520bed8a-4d74-9a7e-5aab-67829c19cca6" [ 1305.659030] env[61978]: _type = "Task" [ 1305.659030] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.669427] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520bed8a-4d74-9a7e-5aab-67829c19cca6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.786957] env[61978]: DEBUG nova.network.neutron [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Successfully updated port: 7cefaef7-7dfd-4081-8872-bbdb8d201973 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1305.933014] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395919, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.967727] env[61978]: DEBUG oslo_concurrency.lockutils [req-8e74b0b3-ed1b-45ca-8b24-e8b6eabd656b req-7250097b-589d-4d45-9c67-60fbae0f3a1c service nova] Releasing lock "refresh_cache-ac1676dd-affa-49cd-9e7b-a301abcec232" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1305.995948] env[61978]: DEBUG nova.compute.manager [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1306.067541] env[61978]: DEBUG oslo_vmware.api [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395917, 'name': PowerOnVM_Task, 'duration_secs': 0.879142} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.067841] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1306.068079] env[61978]: INFO nova.compute.manager [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Took 8.14 seconds to spawn the instance on the hypervisor. 
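The repeated "Task: {'id': ..., 'name': ...} progress is N%" and "completed successfully. ... duration_secs" lines above are produced by Nova's polling of vCenter tasks (the wait_for_task/_poll_task pair). A minimal sketch of that polling pattern, assuming a hypothetical fetch_task_info callable in place of the real oslo.vmware session API:

import time

def wait_for_task(fetch_task_info, poll_interval=0.5):
    # Poll a hypothetical task-info callable until it reports success or error.
    while True:
        info = fetch_task_info()          # e.g. {'state': 'running', 'progress': 51}
        if info['state'] == 'success':
            return info.get('result')     # logged above as "completed successfully"
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        print("progress is %s%%" % info.get('progress', 0))  # mirrors the DEBUG lines
        time.sleep(poll_interval)

# Toy usage: a fake task that finishes after three polls.
_states = iter([{'state': 'running', 'progress': 0},
                {'state': 'running', 'progress': 51},
                {'state': 'success', 'progress': 100, 'result': 'task-1395919'}])
print(wait_for_task(lambda: next(_states), poll_interval=0))
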
[ 1306.072016] env[61978]: DEBUG nova.compute.manager [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1306.072016] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2086eec-8c00-482a-a387-8993f9d944f7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.079106] env[61978]: DEBUG nova.compute.manager [req-35636745-7917-4ae9-baeb-d3eb47818455 req-dfd1df86-dd55-4bdd-9cd9-b4e370e8c092 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Received event network-vif-plugged-7cefaef7-7dfd-4081-8872-bbdb8d201973 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1306.079324] env[61978]: DEBUG oslo_concurrency.lockutils [req-35636745-7917-4ae9-baeb-d3eb47818455 req-dfd1df86-dd55-4bdd-9cd9-b4e370e8c092 service nova] Acquiring lock "b4541d84-b4c3-4441-b5a7-90de2dac3562-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1306.079608] env[61978]: DEBUG oslo_concurrency.lockutils [req-35636745-7917-4ae9-baeb-d3eb47818455 req-dfd1df86-dd55-4bdd-9cd9-b4e370e8c092 service nova] Lock "b4541d84-b4c3-4441-b5a7-90de2dac3562-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1306.079790] env[61978]: DEBUG oslo_concurrency.lockutils [req-35636745-7917-4ae9-baeb-d3eb47818455 req-dfd1df86-dd55-4bdd-9cd9-b4e370e8c092 service nova] Lock "b4541d84-b4c3-4441-b5a7-90de2dac3562-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1306.079973] env[61978]: DEBUG nova.compute.manager [req-35636745-7917-4ae9-baeb-d3eb47818455 req-dfd1df86-dd55-4bdd-9cd9-b4e370e8c092 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] No waiting events found dispatching network-vif-plugged-7cefaef7-7dfd-4081-8872-bbdb8d201973 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1306.080157] env[61978]: WARNING nova.compute.manager [req-35636745-7917-4ae9-baeb-d3eb47818455 req-dfd1df86-dd55-4bdd-9cd9-b4e370e8c092 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Received unexpected event network-vif-plugged-7cefaef7-7dfd-4081-8872-bbdb8d201973 for instance with vm_state building and task_state spawning. 
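The 'Acquiring lock "b4541d84-...-events" / acquired ... waited 0.000s / "released" ... held 0.000s' lines above show oslo.concurrency's named in-process locks serializing per-instance event handling, so concurrent external events for the same instance are dispatched one at a time. A minimal sketch, assuming oslo.concurrency is installed and using a hypothetical pop_instance_event body (the real Nova method consults a per-instance event registry):

from oslo_concurrency import lockutils

def pop_instance_event(instance_uuid, event_name):
    # Hypothetical body: the real handler looks up a registered waiter for
    # (instance_uuid, event_name) and returns it; None corresponds to the
    # "No waiting events found dispatching ..." message above.
    with lockutils.lock("%s-events" % instance_uuid):
        return None

print(pop_instance_event("b4541d84-b4c3-4441-b5a7-90de2dac3562",
                         "network-vif-plugged-7cefaef7-7dfd-4081-8872-bbdb8d201973"))
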
[ 1306.080324] env[61978]: DEBUG nova.compute.manager [req-35636745-7917-4ae9-baeb-d3eb47818455 req-dfd1df86-dd55-4bdd-9cd9-b4e370e8c092 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Received event network-changed-7cefaef7-7dfd-4081-8872-bbdb8d201973 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1306.080527] env[61978]: DEBUG nova.compute.manager [req-35636745-7917-4ae9-baeb-d3eb47818455 req-dfd1df86-dd55-4bdd-9cd9-b4e370e8c092 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Refreshing instance network info cache due to event network-changed-7cefaef7-7dfd-4081-8872-bbdb8d201973. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1306.080714] env[61978]: DEBUG oslo_concurrency.lockutils [req-35636745-7917-4ae9-baeb-d3eb47818455 req-dfd1df86-dd55-4bdd-9cd9-b4e370e8c092 service nova] Acquiring lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1306.080926] env[61978]: DEBUG oslo_concurrency.lockutils [req-35636745-7917-4ae9-baeb-d3eb47818455 req-dfd1df86-dd55-4bdd-9cd9-b4e370e8c092 service nova] Acquired lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.081062] env[61978]: DEBUG nova.network.neutron [req-35636745-7917-4ae9-baeb-d3eb47818455 req-dfd1df86-dd55-4bdd-9cd9-b4e370e8c092 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Refreshing network info cache for port 7cefaef7-7dfd-4081-8872-bbdb8d201973 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1306.092545] env[61978]: DEBUG nova.network.neutron [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Updating instance_info_cache with network_info: [{"id": "f394483a-0b84-4d01-aee1-a50c3a3ee0ff", "address": "fa:16:3e:de:15:87", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf394483a-0b", "ovs_interfaceid": "f394483a-0b84-4d01-aee1-a50c3a3ee0ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.169378] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 
tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520bed8a-4d74-9a7e-5aab-67829c19cca6, 'name': SearchDatastore_Task, 'duration_secs': 0.082641} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.169500] env[61978]: DEBUG oslo_concurrency.lockutils [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1306.169733] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1306.169972] env[61978]: DEBUG oslo_concurrency.lockutils [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1306.170140] env[61978]: DEBUG oslo_concurrency.lockutils [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.170327] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1306.170605] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a626135c-7bc1-4ac2-9e34-a2aa9ffe8de2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.179788] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1306.179937] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1306.180621] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-901317ea-d7bc-4172-a02d-2449583f99e9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.183511] env[61978]: DEBUG nova.network.neutron [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Successfully created port: 90e06aab-ecdf-42fb-b192-54ed2dd034ea {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1306.189817] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1306.189817] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d89411-cf43-9335-d963-03e10aa4af67" [ 1306.189817] env[61978]: _type = "Task" [ 1306.189817] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.200967] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d89411-cf43-9335-d963-03e10aa4af67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.289730] env[61978]: DEBUG oslo_concurrency.lockutils [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1306.430371] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395919, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.669984} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.430371] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd/fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1306.430581] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1306.430808] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-800ae411-e036-43d8-a51f-a8eceec790da {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.440868] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1306.440868] env[61978]: value = "task-1395920" [ 1306.440868] env[61978]: _type = "Task" [ 1306.440868] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.451215] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395920, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.592207] env[61978]: INFO nova.compute.manager [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Took 16.74 seconds to build instance. [ 1306.595036] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Releasing lock "refresh_cache-c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1306.620713] env[61978]: DEBUG nova.network.neutron [req-35636745-7917-4ae9-baeb-d3eb47818455 req-dfd1df86-dd55-4bdd-9cd9-b4e370e8c092 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1306.703241] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d89411-cf43-9335-d963-03e10aa4af67, 'name': SearchDatastore_Task, 'duration_secs': 0.043891} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.704214] env[61978]: DEBUG nova.network.neutron [req-35636745-7917-4ae9-baeb-d3eb47818455 req-dfd1df86-dd55-4bdd-9cd9-b4e370e8c092 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.706146] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-791a9690-ca97-4145-ab87-dbcd36fdbbea {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.714369] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1306.714369] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5264915c-2d22-c313-fc0e-2ce453a35e8e" [ 1306.714369] env[61978]: _type = "Task" [ 1306.714369] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.726392] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5264915c-2d22-c313-fc0e-2ce453a35e8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.952634] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395920, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084852} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.952958] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1306.953804] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ae6203-d033-4319-a720-787525edabca {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.979355] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd/fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1306.980149] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c6c8ef9-a538-4bb4-9fa6-92b430453d8a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.002705] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1307.002705] env[61978]: value = "task-1395921" [ 1307.002705] env[61978]: _type = "Task" [ 1307.002705] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.006800] env[61978]: DEBUG nova.compute.manager [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1307.016308] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395921, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.034167] env[61978]: DEBUG nova.virt.hardware [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1307.034452] env[61978]: DEBUG nova.virt.hardware [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1307.034639] env[61978]: DEBUG nova.virt.hardware [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1307.034843] env[61978]: DEBUG nova.virt.hardware [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1307.035017] env[61978]: DEBUG nova.virt.hardware [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1307.035217] env[61978]: DEBUG nova.virt.hardware [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1307.035444] env[61978]: DEBUG nova.virt.hardware [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1307.035632] env[61978]: DEBUG nova.virt.hardware [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1307.035820] env[61978]: DEBUG 
nova.virt.hardware [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1307.036034] env[61978]: DEBUG nova.virt.hardware [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1307.036248] env[61978]: DEBUG nova.virt.hardware [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1307.037259] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e27796dd-3ccc-464a-99db-c5792e8f4fdd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.045813] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40dde592-f435-429d-8516-9d55b7b1d812 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.095751] env[61978]: DEBUG oslo_concurrency.lockutils [None req-55210947-d688-43a5-ae36-4d4113bc9697 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.250s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1307.103801] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6dce7a6-3703-4eee-a979-2ec5224070f3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.112477] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a6c089-6455-4f00-ae3c-9d2c6391c14b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.210416] env[61978]: DEBUG oslo_concurrency.lockutils [req-35636745-7917-4ae9-baeb-d3eb47818455 req-dfd1df86-dd55-4bdd-9cd9-b4e370e8c092 service nova] Releasing lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1307.210963] env[61978]: DEBUG oslo_concurrency.lockutils [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.211218] env[61978]: DEBUG nova.network.neutron [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Building network info 
cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1307.230289] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5264915c-2d22-c313-fc0e-2ce453a35e8e, 'name': SearchDatastore_Task, 'duration_secs': 0.018086} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.230669] env[61978]: DEBUG oslo_concurrency.lockutils [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1307.231061] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] ac1676dd-affa-49cd-9e7b-a301abcec232/ac1676dd-affa-49cd-9e7b-a301abcec232.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1307.231418] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1773a9c2-0286-4b21-b7b5-16dd91183687 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.239455] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1307.239455] env[61978]: value = "task-1395923" [ 1307.239455] env[61978]: _type = "Task" [ 1307.239455] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.248469] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395923, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.513800] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395921, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.703707] env[61978]: DEBUG nova.network.neutron [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Successfully updated port: 90e06aab-ecdf-42fb-b192-54ed2dd034ea {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1307.744717] env[61978]: DEBUG nova.network.neutron [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1307.756027] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395923, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.891266] env[61978]: DEBUG nova.network.neutron [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Updating instance_info_cache with network_info: [{"id": "7cefaef7-7dfd-4081-8872-bbdb8d201973", "address": "fa:16:3e:40:2b:80", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cefaef7-7d", "ovs_interfaceid": "7cefaef7-7dfd-4081-8872-bbdb8d201973", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1307.940564] env[61978]: DEBUG nova.compute.manager [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Stashing vm_state: active {{(pid=61978) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1308.015387] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395921, 'name': ReconfigVM_Task, 'duration_secs': 0.56683} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.015730] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Reconfigured VM instance instance-00000066 to attach disk [datastore2] fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd/fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1308.016392] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f4782da3-2156-4972-8f28-1fbcaaeef23e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.025418] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1308.025418] env[61978]: value = "task-1395924" [ 1308.025418] env[61978]: _type = "Task" [ 1308.025418] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.036756] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395924, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.121039] env[61978]: DEBUG nova.compute.manager [req-50e9f022-b8ea-429f-9173-343bf5c62d4c req-5650cf33-4e69-400f-8c64-fab30bc8efc8 service nova] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Received event network-vif-plugged-90e06aab-ecdf-42fb-b192-54ed2dd034ea {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1308.121371] env[61978]: DEBUG oslo_concurrency.lockutils [req-50e9f022-b8ea-429f-9173-343bf5c62d4c req-5650cf33-4e69-400f-8c64-fab30bc8efc8 service nova] Acquiring lock "a5b3f628-edc6-4d30-a179-ffc755f940f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1308.121643] env[61978]: DEBUG oslo_concurrency.lockutils [req-50e9f022-b8ea-429f-9173-343bf5c62d4c req-5650cf33-4e69-400f-8c64-fab30bc8efc8 service nova] Lock "a5b3f628-edc6-4d30-a179-ffc755f940f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1308.121928] env[61978]: DEBUG oslo_concurrency.lockutils [req-50e9f022-b8ea-429f-9173-343bf5c62d4c req-5650cf33-4e69-400f-8c64-fab30bc8efc8 service nova] Lock "a5b3f628-edc6-4d30-a179-ffc755f940f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1308.122174] env[61978]: DEBUG nova.compute.manager [req-50e9f022-b8ea-429f-9173-343bf5c62d4c req-5650cf33-4e69-400f-8c64-fab30bc8efc8 service nova] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] No waiting events found dispatching 
network-vif-plugged-90e06aab-ecdf-42fb-b192-54ed2dd034ea {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1308.122460] env[61978]: WARNING nova.compute.manager [req-50e9f022-b8ea-429f-9173-343bf5c62d4c req-5650cf33-4e69-400f-8c64-fab30bc8efc8 service nova] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Received unexpected event network-vif-plugged-90e06aab-ecdf-42fb-b192-54ed2dd034ea for instance with vm_state building and task_state spawning. [ 1308.122704] env[61978]: DEBUG nova.compute.manager [req-50e9f022-b8ea-429f-9173-343bf5c62d4c req-5650cf33-4e69-400f-8c64-fab30bc8efc8 service nova] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Received event network-changed-90e06aab-ecdf-42fb-b192-54ed2dd034ea {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1308.123044] env[61978]: DEBUG nova.compute.manager [req-50e9f022-b8ea-429f-9173-343bf5c62d4c req-5650cf33-4e69-400f-8c64-fab30bc8efc8 service nova] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Refreshing instance network info cache due to event network-changed-90e06aab-ecdf-42fb-b192-54ed2dd034ea. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1308.123303] env[61978]: DEBUG oslo_concurrency.lockutils [req-50e9f022-b8ea-429f-9173-343bf5c62d4c req-5650cf33-4e69-400f-8c64-fab30bc8efc8 service nova] Acquiring lock "refresh_cache-a5b3f628-edc6-4d30-a179-ffc755f940f7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1308.123557] env[61978]: DEBUG oslo_concurrency.lockutils [req-50e9f022-b8ea-429f-9173-343bf5c62d4c req-5650cf33-4e69-400f-8c64-fab30bc8efc8 service nova] Acquired lock "refresh_cache-a5b3f628-edc6-4d30-a179-ffc755f940f7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1308.123851] env[61978]: DEBUG nova.network.neutron [req-50e9f022-b8ea-429f-9173-343bf5c62d4c req-5650cf33-4e69-400f-8c64-fab30bc8efc8 service nova] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Refreshing network info cache for port 90e06aab-ecdf-42fb-b192-54ed2dd034ea {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1308.207953] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquiring lock "refresh_cache-a5b3f628-edc6-4d30-a179-ffc755f940f7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1308.214124] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-948f1dbf-b33b-4e28-8160-811a55f97ee5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.235303] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80327c4-ab3e-4525-a517-708fef5f40db {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.246161] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Updating instance 'c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9' progress to 83 {{(pid=61978) _update_instance_progress 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1308.257049] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395923, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.914392} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.257049] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] ac1676dd-affa-49cd-9e7b-a301abcec232/ac1676dd-affa-49cd-9e7b-a301abcec232.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1308.257236] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1308.257461] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c6a6bade-df88-4261-8fc0-26de0830c72a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.265473] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1308.265473] env[61978]: value = "task-1395925" [ 1308.265473] env[61978]: _type = "Task" [ 1308.265473] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.274433] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395925, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.394723] env[61978]: DEBUG oslo_concurrency.lockutils [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1308.394723] env[61978]: DEBUG nova.compute.manager [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Instance network_info: |[{"id": "7cefaef7-7dfd-4081-8872-bbdb8d201973", "address": "fa:16:3e:40:2b:80", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cefaef7-7d", "ovs_interfaceid": "7cefaef7-7dfd-4081-8872-bbdb8d201973", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1308.395160] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:2b:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7cefaef7-7dfd-4081-8872-bbdb8d201973', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1308.402691] env[61978]: DEBUG oslo.service.loopingcall [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1308.402911] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1308.403527] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d09de168-c433-4f20-877d-bcae92abc4e7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.425148] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1308.425148] env[61978]: value = "task-1395926" [ 1308.425148] env[61978]: _type = "Task" [ 1308.425148] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.433713] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395926, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.457139] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1308.457433] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1308.545481] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395924, 'name': Rename_Task, 'duration_secs': 0.288875} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.545784] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1308.546112] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-628e81d5-df04-4a4a-affa-cda63677c632 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.553418] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1308.553418] env[61978]: value = "task-1395927" [ 1308.553418] env[61978]: _type = "Task" [ 1308.553418] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.562970] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395927, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.666142] env[61978]: DEBUG nova.network.neutron [req-50e9f022-b8ea-429f-9173-343bf5c62d4c req-5650cf33-4e69-400f-8c64-fab30bc8efc8 service nova] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1308.754202] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1308.754605] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6fa51ca7-7a06-402b-bd03-a13df73715d9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.763704] env[61978]: DEBUG oslo_vmware.api [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1308.763704] env[61978]: value = "task-1395928" [ 1308.763704] env[61978]: _type = "Task" [ 1308.763704] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.773542] env[61978]: DEBUG nova.network.neutron [req-50e9f022-b8ea-429f-9173-343bf5c62d4c req-5650cf33-4e69-400f-8c64-fab30bc8efc8 service nova] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1308.780971] env[61978]: DEBUG oslo_vmware.api [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395928, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.780971] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395925, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073778} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.781535] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1308.782350] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e95fde83-efe0-4b07-8907-44961b83175a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.807099] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] ac1676dd-affa-49cd-9e7b-a301abcec232/ac1676dd-affa-49cd-9e7b-a301abcec232.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1308.807791] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-faa769b2-1f23-4b5e-9a34-d5c87c7bbb40 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.827442] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1308.827442] env[61978]: value = "task-1395929" [ 1308.827442] env[61978]: _type = "Task" [ 1308.827442] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.835991] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395929, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.934812] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395926, 'name': CreateVM_Task, 'duration_secs': 0.412685} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.935453] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1308.935916] env[61978]: DEBUG oslo_concurrency.lockutils [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1308.936189] env[61978]: DEBUG oslo_concurrency.lockutils [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1308.936578] env[61978]: DEBUG oslo_concurrency.lockutils [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1308.937281] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bee35c05-37cb-436b-b272-f284827b99e3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.943019] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1308.943019] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52176824-b776-e03b-c661-2f18f2759032" [ 1308.943019] env[61978]: _type = "Task" [ 1308.943019] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.950379] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52176824-b776-e03b-c661-2f18f2759032, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.961922] env[61978]: INFO nova.compute.claims [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1309.064142] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395927, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.273107] env[61978]: DEBUG oslo_vmware.api [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395928, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.282779] env[61978]: DEBUG oslo_concurrency.lockutils [req-50e9f022-b8ea-429f-9173-343bf5c62d4c req-5650cf33-4e69-400f-8c64-fab30bc8efc8 service nova] Releasing lock "refresh_cache-a5b3f628-edc6-4d30-a179-ffc755f940f7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1309.283167] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquired lock "refresh_cache-a5b3f628-edc6-4d30-a179-ffc755f940f7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1309.283340] env[61978]: DEBUG nova.network.neutron [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1309.338899] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395929, 'name': ReconfigVM_Task, 'duration_secs': 0.295755} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.340153] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Reconfigured VM instance instance-00000067 to attach disk [datastore2] ac1676dd-affa-49cd-9e7b-a301abcec232/ac1676dd-affa-49cd-9e7b-a301abcec232.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1309.340273] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9e0b22fc-d7a7-4701-86ff-db7d45e203c4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.346030] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1309.346030] env[61978]: value = "task-1395931" [ 1309.346030] env[61978]: _type = "Task" [ 1309.346030] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.354458] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395931, 'name': Rename_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.453401] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52176824-b776-e03b-c661-2f18f2759032, 'name': SearchDatastore_Task, 'duration_secs': 0.018901} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.453749] env[61978]: DEBUG oslo_concurrency.lockutils [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1309.454011] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1309.454269] env[61978]: DEBUG oslo_concurrency.lockutils [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1309.454427] env[61978]: DEBUG oslo_concurrency.lockutils [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1309.454613] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1309.454922] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec3779e7-f4ab-4402-a68a-4c0e03bae174 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.464567] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1309.464767] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1309.465581] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-893e443d-0440-4e73-9006-1ef4ceab6ecf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.469404] env[61978]: INFO nova.compute.resource_tracker [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Updating resource usage from migration 25324394-3e2c-4647-9958-44e8e2ecc167 [ 1309.476153] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1309.476153] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52146684-8b94-014c-24d2-b5487a0fc43f" [ 1309.476153] env[61978]: _type = "Task" [ 1309.476153] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.486567] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52146684-8b94-014c-24d2-b5487a0fc43f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.566995] env[61978]: DEBUG oslo_vmware.api [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395927, 'name': PowerOnVM_Task, 'duration_secs': 0.968891} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.567360] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1309.567656] env[61978]: INFO nova.compute.manager [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Took 9.40 seconds to spawn the instance on the hypervisor. 
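[annotation] The repeated "Invoking VirtualMachine.PowerOnVM_Task", "Waiting for the task: (returnval){...}", "progress is 0%/88%" and "completed successfully" records above all come from the same oslo.vmware task loop: a vSphere task is started with invoke_api() and then polled by wait_for_task(), and each poll shows up as a _poll_task line. A minimal sketch of that pattern follows; it is not Nova's spawn code, the vCenter host, credentials and the managed-object reference are placeholders, and keyword names for the session are assumed from typical oslo.vmware usage.

# Illustrative sketch only -- placeholder connection details and moref.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vc1.example.test',              # placeholder vCenter host
    'administrator@vsphere.local',   # placeholder user
    'secret',                        # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)          # each poll is a "_poll_task ... progress is N%" line

# Look up the VM managed object and start the power-on task
# (vm-12345 is a hypothetical moref value for illustration).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# wait_for_task() polls the task's progress until it reaches "success"
# (logged as "completed successfully") or raises if the task errors out.
session.wait_for_task(task)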
[ 1309.567884] env[61978]: DEBUG nova.compute.manager [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1309.568921] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f65bec-7e2c-4e4c-a2fe-32001e8489b6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.662326] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80c4db5c-09c5-4a3e-b14f-fea9108dcdb0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.671777] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f3cffe-bee4-4b4d-b9e5-ca94f1ae808f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.707508] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84c9d56-bc2e-46bf-921d-29f9f062d628 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.715084] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8a07869-966b-413b-85ed-64f570967a1b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.729685] env[61978]: DEBUG nova.compute.provider_tree [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1309.775392] env[61978]: DEBUG oslo_vmware.api [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395928, 'name': PowerOnVM_Task, 'duration_secs': 0.752192} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.775748] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1309.775983] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f3262a66-9d94-4610-bbc9-b92aa5afb7cc tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Updating instance 'c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9' progress to 100 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1309.815898] env[61978]: DEBUG nova.network.neutron [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1309.857732] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395931, 'name': Rename_Task, 'duration_secs': 0.277453} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.858027] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1309.858275] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-35f1774a-ef29-43fd-aa4c-e8ca9a35f4bc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.865198] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1309.865198] env[61978]: value = "task-1395932" [ 1309.865198] env[61978]: _type = "Task" [ 1309.865198] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.874499] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395932, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.894710] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1309.894985] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1309.956015] env[61978]: DEBUG nova.network.neutron [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Updating instance_info_cache with network_info: [{"id": "90e06aab-ecdf-42fb-b192-54ed2dd034ea", "address": "fa:16:3e:e8:31:3b", "network": {"id": "cf176714-7dd5-4d40-8f6d-7e46444187ca", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1909576930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4975f37c081466ab85cf1c21b750c10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90e06aab-ec", "ovs_interfaceid": "90e06aab-ecdf-42fb-b192-54ed2dd034ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1309.986013] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52146684-8b94-014c-24d2-b5487a0fc43f, 'name': SearchDatastore_Task, 'duration_secs': 0.01361} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.986898] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf236837-5603-47a7-9381-567ef025cc7c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.991935] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1309.991935] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e85698-719d-2ab6-f6db-5378d64a5f67" [ 1309.991935] env[61978]: _type = "Task" [ 1309.991935] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.999487] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e85698-719d-2ab6-f6db-5378d64a5f67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.091548] env[61978]: INFO nova.compute.manager [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Took 16.01 seconds to build instance. [ 1310.235761] env[61978]: DEBUG nova.scheduler.client.report [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1310.382086] env[61978]: DEBUG oslo_vmware.api [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1395932, 'name': PowerOnVM_Task, 'duration_secs': 0.513601} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.382778] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1310.383193] env[61978]: INFO nova.compute.manager [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Took 7.91 seconds to spawn the instance on the hypervisor. 
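[annotation] The lock traffic threaded through this section -- "Acquiring lock \"compute_resources\" by ...resize_claim", "acquired ... waited 0.000s", and later "released ... held 2.284s", plus the plain Acquiring/Acquired/Releasing lines around the "[datastore1] devstack-image-cache_base" lock -- is emitted by oslo.concurrency. A rough sketch of the two usage forms behind those messages is below; the functions are stand-ins, not Nova code, and the prefix mirrors how Nova typically wraps lockutils.

# Illustrative sketch only -- function names are hypothetical.
from oslo_concurrency import lockutils

synchronized = lockutils.synchronized_with_prefix('nova-')

@synchronized('compute_resources')
def resize_claim_example(instance_uuid):
    # Decorator form: produces the 'Acquiring lock "X" by "Y"',
    # 'acquired ... waited N.NNNs' and 'released ... held N.NNNs'
    # messages (lockutils.py:402/407/421 in the log).
    return instance_uuid

def touch_image_cache(ds_path):
    # Context-manager form: produces the plain Acquiring/Acquired/
    # Releasing lock messages (lockutils.py:310/313/331 in the log).
    with lockutils.lock(ds_path):
        pass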
[ 1310.383564] env[61978]: DEBUG nova.compute.manager [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1310.386160] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a377491-2c6e-472d-a89b-560bb3bec35f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.401853] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1310.407044] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1310.407044] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1310.461018] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Releasing lock "refresh_cache-a5b3f628-edc6-4d30-a179-ffc755f940f7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1310.461018] env[61978]: DEBUG nova.compute.manager [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Instance network_info: |[{"id": "90e06aab-ecdf-42fb-b192-54ed2dd034ea", "address": "fa:16:3e:e8:31:3b", "network": {"id": "cf176714-7dd5-4d40-8f6d-7e46444187ca", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1909576930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4975f37c081466ab85cf1c21b750c10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90e06aab-ec", "ovs_interfaceid": "90e06aab-ecdf-42fb-b192-54ed2dd034ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1310.461018] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 
a5b3f628-edc6-4d30-a179-ffc755f940f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:31:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db068f71-08cc-42d4-8ab6-17134c1585e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '90e06aab-ecdf-42fb-b192-54ed2dd034ea', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1310.467853] env[61978]: DEBUG oslo.service.loopingcall [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1310.468305] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1310.468704] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bbf5d319-037f-44ce-86b9-c99af5c6b0df {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.490352] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1310.490352] env[61978]: value = "task-1395933" [ 1310.490352] env[61978]: _type = "Task" [ 1310.490352] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.502414] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395933, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.505835] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e85698-719d-2ab6-f6db-5378d64a5f67, 'name': SearchDatastore_Task, 'duration_secs': 0.01894} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.506183] env[61978]: DEBUG oslo_concurrency.lockutils [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1310.506494] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] b4541d84-b4c3-4441-b5a7-90de2dac3562/b4541d84-b4c3-4441-b5a7-90de2dac3562.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1310.506789] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-96936163-88b3-46c0-8f07-ed418cfb2aa6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.512912] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1310.512912] env[61978]: value = "task-1395934" [ 1310.512912] env[61978]: _type = "Task" [ 1310.512912] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.522549] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395934, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.565379] env[61978]: DEBUG nova.compute.manager [req-ae72aa9b-13a7-481c-bb3f-156f161ca92d req-9b8fb376-0f6d-400c-8c63-967646b0c364 service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Received event network-changed-2a16d335-2f9e-47f7-a83c-44777d05ca3b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1310.566051] env[61978]: DEBUG nova.compute.manager [req-ae72aa9b-13a7-481c-bb3f-156f161ca92d req-9b8fb376-0f6d-400c-8c63-967646b0c364 service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Refreshing instance network info cache due to event network-changed-2a16d335-2f9e-47f7-a83c-44777d05ca3b. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1310.566051] env[61978]: DEBUG oslo_concurrency.lockutils [req-ae72aa9b-13a7-481c-bb3f-156f161ca92d req-9b8fb376-0f6d-400c-8c63-967646b0c364 service nova] Acquiring lock "refresh_cache-fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1310.566051] env[61978]: DEBUG oslo_concurrency.lockutils [req-ae72aa9b-13a7-481c-bb3f-156f161ca92d req-9b8fb376-0f6d-400c-8c63-967646b0c364 service nova] Acquired lock "refresh_cache-fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.566394] env[61978]: DEBUG nova.network.neutron [req-ae72aa9b-13a7-481c-bb3f-156f161ca92d req-9b8fb376-0f6d-400c-8c63-967646b0c364 service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Refreshing network info cache for port 2a16d335-2f9e-47f7-a83c-44777d05ca3b {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1310.593826] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9ce7f3bc-1185-44a3-a965-8219cfebb4ec tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.526s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1310.741979] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.284s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1310.742243] env[61978]: INFO nova.compute.manager [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Migrating [ 1310.903631] env[61978]: INFO nova.compute.manager [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Took 16.15 seconds to build instance. [ 1310.911473] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Skipping network cache update for instance because it is Building. {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1310.911473] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Skipping network cache update for instance because it is Building. {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1310.911473] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Skipping network cache update for instance because it is Building. 
{{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1310.942633] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "refresh_cache-243e7146-46fc-43f4-a83b-cdc58f397f9e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1310.942792] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquired lock "refresh_cache-243e7146-46fc-43f4-a83b-cdc58f397f9e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.942945] env[61978]: DEBUG nova.network.neutron [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Forcefully refreshing network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1310.943136] env[61978]: DEBUG nova.objects.instance [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lazy-loading 'info_cache' on Instance uuid 243e7146-46fc-43f4-a83b-cdc58f397f9e {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1311.001022] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395933, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.022664] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395934, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.259913] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "refresh_cache-48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1311.259913] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "refresh_cache-48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.259913] env[61978]: DEBUG nova.network.neutron [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1311.405011] env[61978]: DEBUG oslo_concurrency.lockutils [None req-37a97635-2306-4d3d-9fe9-6f95f6929c18 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "ac1676dd-affa-49cd-9e7b-a301abcec232" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.667s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1311.486196] env[61978]: DEBUG nova.network.neutron [req-ae72aa9b-13a7-481c-bb3f-156f161ca92d 
req-9b8fb376-0f6d-400c-8c63-967646b0c364 service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Updated VIF entry in instance network info cache for port 2a16d335-2f9e-47f7-a83c-44777d05ca3b. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1311.486569] env[61978]: DEBUG nova.network.neutron [req-ae72aa9b-13a7-481c-bb3f-156f161ca92d req-9b8fb376-0f6d-400c-8c63-967646b0c364 service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Updating instance_info_cache with network_info: [{"id": "2a16d335-2f9e-47f7-a83c-44777d05ca3b", "address": "fa:16:3e:e2:18:65", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a16d335-2f", "ovs_interfaceid": "2a16d335-2f9e-47f7-a83c-44777d05ca3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1311.501057] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395933, 'name': CreateVM_Task, 'duration_secs': 0.757481} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.501234] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1311.501953] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1311.502138] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.502732] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1311.502732] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc77f7a9-ca77-402d-ab63-36709f4354a7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.510367] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for the task: (returnval){ [ 1311.510367] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52852879-4edb-b738-96b8-b51159b1f2f2" [ 1311.510367] env[61978]: _type = "Task" [ 1311.510367] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.522809] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52852879-4edb-b738-96b8-b51159b1f2f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.526366] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395934, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.771093} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.526619] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] b4541d84-b4c3-4441-b5a7-90de2dac3562/b4541d84-b4c3-4441-b5a7-90de2dac3562.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1311.526916] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1311.527253] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cbb98973-b996-4a40-ab46-7ffb10fc39f5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.537029] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1311.537029] env[61978]: value = "task-1395936" [ 1311.537029] env[61978]: _type = "Task" [ 1311.537029] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.545220] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395936, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.628115] env[61978]: DEBUG oslo_concurrency.lockutils [None req-35344e2b-4bb2-4f4c-9d28-1a5dc4951f60 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1311.628115] env[61978]: DEBUG oslo_concurrency.lockutils [None req-35344e2b-4bb2-4f4c-9d28-1a5dc4951f60 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1311.628115] env[61978]: DEBUG nova.compute.manager [None req-35344e2b-4bb2-4f4c-9d28-1a5dc4951f60 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Going to confirm migration 8 {{(pid=61978) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1311.963882] env[61978]: DEBUG nova.network.neutron [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Updating instance_info_cache with network_info: [{"id": "9306ddf1-11e6-4d9d-8cda-d4f9bf78420f", "address": "fa:16:3e:35:51:b2", "network": {"id": "69a878ea-ec7d-4793-9c48-f0f5d454a6fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1711420523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d95ebcafdca43b8a1636e21c7258803", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9306ddf1-11", "ovs_interfaceid": "9306ddf1-11e6-4d9d-8cda-d4f9bf78420f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1311.989701] env[61978]: DEBUG oslo_concurrency.lockutils [req-ae72aa9b-13a7-481c-bb3f-156f161ca92d req-9b8fb376-0f6d-400c-8c63-967646b0c364 service nova] Releasing lock "refresh_cache-fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1312.020725] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': 
session[52499f00-04f1-7b6f-00fd-7545db7737b2]52852879-4edb-b738-96b8-b51159b1f2f2, 'name': SearchDatastore_Task, 'duration_secs': 0.11909} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.020990] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1312.021246] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1312.021487] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1312.021631] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.021809] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1312.022120] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-807f86d0-7dee-44f6-867e-59fd4ce86fe1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.043053] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395936, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078943} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.043359] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1312.044132] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-136cdeaa-4554-4640-baf4-031fd5b9a0cf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.065438] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] b4541d84-b4c3-4441-b5a7-90de2dac3562/b4541d84-b4c3-4441-b5a7-90de2dac3562.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1312.065691] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3797e6d-1b41-492d-8193-fe80f57ed4e0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.084080] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1312.084439] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1312.085333] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d4cc07b-69d1-4944-b112-10530a98e653 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.089312] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1312.089312] env[61978]: value = "task-1395937" [ 1312.089312] env[61978]: _type = "Task" [ 1312.089312] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.094074] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for the task: (returnval){ [ 1312.094074] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ecb2c3-7c1f-d36c-8d9d-84f87d74bcd7" [ 1312.094074] env[61978]: _type = "Task" [ 1312.094074] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.101319] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395937, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.106752] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ecb2c3-7c1f-d36c-8d9d-84f87d74bcd7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.198947] env[61978]: DEBUG oslo_concurrency.lockutils [None req-35344e2b-4bb2-4f4c-9d28-1a5dc4951f60 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "refresh_cache-c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1312.199130] env[61978]: DEBUG oslo_concurrency.lockutils [None req-35344e2b-4bb2-4f4c-9d28-1a5dc4951f60 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquired lock "refresh_cache-c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.199321] env[61978]: DEBUG nova.network.neutron [None req-35344e2b-4bb2-4f4c-9d28-1a5dc4951f60 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1312.199511] env[61978]: DEBUG nova.objects.instance [None req-35344e2b-4bb2-4f4c-9d28-1a5dc4951f60 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lazy-loading 'info_cache' on Instance uuid c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1312.466878] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "refresh_cache-48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1312.592181] env[61978]: DEBUG nova.compute.manager [req-b1cab900-a78c-4b30-ad65-f1e5e71a139e req-242754b2-a0d9-41e0-b9bf-91d1374d086a service nova] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Received event network-changed-f8cf63ba-ee62-4a3a-85e0-87d88ff84665 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1312.592398] env[61978]: DEBUG nova.compute.manager [req-b1cab900-a78c-4b30-ad65-f1e5e71a139e req-242754b2-a0d9-41e0-b9bf-91d1374d086a service nova] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Refreshing instance network info cache due to event network-changed-f8cf63ba-ee62-4a3a-85e0-87d88ff84665. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1312.592613] env[61978]: DEBUG oslo_concurrency.lockutils [req-b1cab900-a78c-4b30-ad65-f1e5e71a139e req-242754b2-a0d9-41e0-b9bf-91d1374d086a service nova] Acquiring lock "refresh_cache-ac1676dd-affa-49cd-9e7b-a301abcec232" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1312.592761] env[61978]: DEBUG oslo_concurrency.lockutils [req-b1cab900-a78c-4b30-ad65-f1e5e71a139e req-242754b2-a0d9-41e0-b9bf-91d1374d086a service nova] Acquired lock "refresh_cache-ac1676dd-affa-49cd-9e7b-a301abcec232" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.592928] env[61978]: DEBUG nova.network.neutron [req-b1cab900-a78c-4b30-ad65-f1e5e71a139e req-242754b2-a0d9-41e0-b9bf-91d1374d086a service nova] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Refreshing network info cache for port f8cf63ba-ee62-4a3a-85e0-87d88ff84665 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1312.607222] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395937, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.611207] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ecb2c3-7c1f-d36c-8d9d-84f87d74bcd7, 'name': SearchDatastore_Task, 'duration_secs': 0.01273} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.611998] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ca97d07-2edb-42e6-a6cb-a4a0925015c8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.616930] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for the task: (returnval){ [ 1312.616930] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]529315b8-acbc-822f-7b42-51cfdcfaa039" [ 1312.616930] env[61978]: _type = "Task" [ 1312.616930] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.627944] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]529315b8-acbc-822f-7b42-51cfdcfaa039, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.659311] env[61978]: DEBUG nova.network.neutron [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Updating instance_info_cache with network_info: [{"id": "2daa968c-ac9c-4f15-ad2b-7977f5581ef1", "address": "fa:16:3e:0c:ea:2e", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2daa968c-ac", "ovs_interfaceid": "2daa968c-ac9c-4f15-ad2b-7977f5581ef1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1313.103772] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395937, 'name': ReconfigVM_Task, 'duration_secs': 0.875191} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.104430] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Reconfigured VM instance instance-00000068 to attach disk [datastore1] b4541d84-b4c3-4441-b5a7-90de2dac3562/b4541d84-b4c3-4441-b5a7-90de2dac3562.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1313.105122] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-42797b33-f6ff-4c16-9855-ce5b0adee2cf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.111288] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1313.111288] env[61978]: value = "task-1395938" [ 1313.111288] env[61978]: _type = "Task" [ 1313.111288] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.119492] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395938, 'name': Rename_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.127577] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]529315b8-acbc-822f-7b42-51cfdcfaa039, 'name': SearchDatastore_Task, 'duration_secs': 0.023529} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.127838] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1313.128164] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] a5b3f628-edc6-4d30-a179-ffc755f940f7/a5b3f628-edc6-4d30-a179-ffc755f940f7.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1313.128427] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3083435-d06b-46e3-b68c-1c3541b43b7b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.138101] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for the task: (returnval){ [ 1313.138101] env[61978]: value = "task-1395939" [ 1313.138101] env[61978]: _type = "Task" [ 1313.138101] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.146043] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395939, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.161737] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Releasing lock "refresh_cache-243e7146-46fc-43f4-a83b-cdc58f397f9e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1313.161924] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Updated the network info_cache for instance {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1313.162144] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1313.162303] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1313.162451] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1313.162598] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1313.162810] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1313.163060] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1313.163148] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1313.163262] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1313.429912] env[61978]: DEBUG nova.network.neutron [req-b1cab900-a78c-4b30-ad65-f1e5e71a139e req-242754b2-a0d9-41e0-b9bf-91d1374d086a service nova] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Updated VIF entry in instance network info cache for port f8cf63ba-ee62-4a3a-85e0-87d88ff84665. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1313.430408] env[61978]: DEBUG nova.network.neutron [req-b1cab900-a78c-4b30-ad65-f1e5e71a139e req-242754b2-a0d9-41e0-b9bf-91d1374d086a service nova] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Updating instance_info_cache with network_info: [{"id": "f8cf63ba-ee62-4a3a-85e0-87d88ff84665", "address": "fa:16:3e:dd:50:05", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8cf63ba-ee", "ovs_interfaceid": "f8cf63ba-ee62-4a3a-85e0-87d88ff84665", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1313.518709] env[61978]: DEBUG nova.network.neutron [None req-35344e2b-4bb2-4f4c-9d28-1a5dc4951f60 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Updating instance_info_cache with network_info: [{"id": "f394483a-0b84-4d01-aee1-a50c3a3ee0ff", "address": "fa:16:3e:de:15:87", "network": {"id": "1a30220b-d64b-485d-84c9-69d73dbf59da", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1743396670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a33ac41ae0247b59c400c6ed9145239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf394483a-0b", "ovs_interfaceid": "f394483a-0b84-4d01-aee1-a50c3a3ee0ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1313.624585] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': 
task-1395938, 'name': Rename_Task, 'duration_secs': 0.156145} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.624889] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1313.625271] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-451e0ba3-6d41-4e91-830d-e3aeca0ad60c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.632302] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1313.632302] env[61978]: value = "task-1395941" [ 1313.632302] env[61978]: _type = "Task" [ 1313.632302] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.641520] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395941, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.650295] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395939, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.666543] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1313.666839] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1313.667160] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1313.667389] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1313.668591] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbadac96-e596-42f9-8f16-007e3bb3ee73 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.678391] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93961297-4e8e-46ec-9a58-b97ab9af703a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.698180] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d2200c-8e4b-4d44-a6aa-a7b7c2ef1a16 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.707076] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37379d53-f22d-443b-a96c-b008fd036805 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.738858] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180015MB free_disk=185GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1313.739132] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1313.739342] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1313.936413] env[61978]: DEBUG oslo_concurrency.lockutils [req-b1cab900-a78c-4b30-ad65-f1e5e71a139e req-242754b2-a0d9-41e0-b9bf-91d1374d086a service nova] Releasing lock "refresh_cache-ac1676dd-affa-49cd-9e7b-a301abcec232" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1313.984590] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71823647-9326-48b6-aded-16529aba64b2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.003301] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Updating instance '48d05337-7018-4dc2-a6a4-dd80ad3c4eb1' progress to 0 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1314.021568] env[61978]: DEBUG oslo_concurrency.lockutils [None req-35344e2b-4bb2-4f4c-9d28-1a5dc4951f60 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Releasing lock "refresh_cache-c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1314.022677] env[61978]: DEBUG nova.objects.instance [None req-35344e2b-4bb2-4f4c-9d28-1a5dc4951f60 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lazy-loading 'migration_context' on Instance uuid c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1314.141727] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395941, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.149433] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395939, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.683407} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.149488] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore1] a5b3f628-edc6-4d30-a179-ffc755f940f7/a5b3f628-edc6-4d30-a179-ffc755f940f7.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1314.149677] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1314.149963] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d05ef720-bfdb-4fe6-a532-3a3bcc24e2d5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.156237] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for the task: (returnval){ [ 1314.156237] env[61978]: value = "task-1395942" [ 1314.156237] env[61978]: _type = "Task" [ 1314.156237] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.165323] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395942, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.510369] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1314.510689] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30fa2245-3273-451d-9e58-897218fb6870 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.517686] env[61978]: DEBUG oslo_vmware.api [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1314.517686] env[61978]: value = "task-1395943" [ 1314.517686] env[61978]: _type = "Task" [ 1314.517686] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.525820] env[61978]: DEBUG nova.objects.base [None req-35344e2b-4bb2-4f4c-9d28-1a5dc4951f60 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1314.526115] env[61978]: DEBUG oslo_vmware.api [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395943, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.526949] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b749d8df-006c-4b4c-92df-9c52484521df {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.544769] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54a7ddac-1cd6-4739-b17d-e5bfd264db60 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.549209] env[61978]: DEBUG oslo_vmware.api [None req-35344e2b-4bb2-4f4c-9d28-1a5dc4951f60 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1314.549209] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e689cd-97dd-3fa6-ddf2-8dcd6c96d705" [ 1314.549209] env[61978]: _type = "Task" [ 1314.549209] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.555945] env[61978]: DEBUG oslo_vmware.api [None req-35344e2b-4bb2-4f4c-9d28-1a5dc4951f60 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e689cd-97dd-3fa6-ddf2-8dcd6c96d705, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.642461] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395941, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.665859] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395942, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06382} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.665859] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1314.666114] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b975463d-33b0-4599-8533-5f8015fdec1f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.687273] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] a5b3f628-edc6-4d30-a179-ffc755f940f7/a5b3f628-edc6-4d30-a179-ffc755f940f7.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1314.687520] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7b5f211-a664-464f-a3a2-fec932a821e3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.705881] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for the task: (returnval){ [ 1314.705881] env[61978]: value = "task-1395944" [ 1314.705881] env[61978]: _type = "Task" [ 1314.705881] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.713588] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395944, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.749646] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Applying migration context for instance 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1 as it has an incoming, in-progress migration 25324394-3e2c-4647-9958-44e8e2ecc167. Migration status is migrating {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1314.749878] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Applying migration context for instance c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9 as it has an incoming, in-progress migration 8f4c2454-40e4-4e41-87c6-8f2423d14fa4. 
Migration status is confirming {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1314.751140] env[61978]: INFO nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Updating resource usage from migration 25324394-3e2c-4647-9958-44e8e2ecc167 [ 1314.751463] env[61978]: INFO nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Updating resource usage from migration 8f4c2454-40e4-4e41-87c6-8f2423d14fa4 [ 1314.771435] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 243e7146-46fc-43f4-a83b-cdc58f397f9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1314.771580] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance f33d00ec-72b7-43f2-bc0d-320e3219ae47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1314.771703] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1314.771822] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 7823099f-efdf-46bf-85d7-69e105dfb02c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1314.771941] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Migration 8f4c2454-40e4-4e41-87c6-8f2423d14fa4 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1314.772071] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1314.772190] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1314.772301] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance ac1676dd-affa-49cd-9e7b-a301abcec232 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1314.772412] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance b4541d84-b4c3-4441-b5a7-90de2dac3562 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1314.772521] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance a5b3f628-edc6-4d30-a179-ffc755f940f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1314.772630] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Migration 25324394-3e2c-4647-9958-44e8e2ecc167 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1314.772737] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1314.772921] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1314.773069] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2944MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1314.935726] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31de0f46-d2d8-4714-a40e-d7e2181efa6f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.944321] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef912b9e-4187-42af-b54e-39524917a56d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.975684] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e54808-9ab8-4c2e-b307-5cf2ca0184e6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.983285] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59026dde-5a07-4154-8c11-cd4edef0c246 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.998172] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1315.026801] env[61978]: DEBUG oslo_vmware.api [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395943, 'name': PowerOffVM_Task, 'duration_secs': 0.494696} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.027128] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1315.027342] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Updating instance '48d05337-7018-4dc2-a6a4-dd80ad3c4eb1' progress to 17 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1315.059235] env[61978]: DEBUG oslo_vmware.api [None req-35344e2b-4bb2-4f4c-9d28-1a5dc4951f60 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e689cd-97dd-3fa6-ddf2-8dcd6c96d705, 'name': SearchDatastore_Task, 'duration_secs': 0.02848} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.059540] env[61978]: DEBUG oslo_concurrency.lockutils [None req-35344e2b-4bb2-4f4c-9d28-1a5dc4951f60 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1315.143671] env[61978]: DEBUG oslo_vmware.api [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395941, 'name': PowerOnVM_Task, 'duration_secs': 1.317035} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.143951] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1315.144188] env[61978]: INFO nova.compute.manager [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Took 10.41 seconds to spawn the instance on the hypervisor. 
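The entries above repeat the same life cycle for every vCenter task: a request handler fires off a CopyVirtualDisk_Task, ReconfigVM_Task, Rename_Task or PowerOnVM_Task, the API layer logs "Waiting for the task ... to complete", _poll_task reports intermediate progress percentages, and the final record carries duration_secs once the task completes successfully. As a minimal, self-contained sketch of that poll-until-done pattern (it does not use oslo.vmware itself; TaskInfo and fetch_task_info are hypothetical stand-ins for the vSphere task objects seen in the log):

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    task_id: str
    state: str            # "queued" | "running" | "success" | "error"
    progress: int         # percent complete
    error: str | None = None

def fetch_task_info(task_id: str, tick: int) -> TaskInfo:
    # Stand-in for reading TaskInfo via the PropertyCollector; it fakes a
    # task that finishes after three polls so the sketch runs on its own.
    if tick < 3:
        return TaskInfo(task_id, "running", tick * 33)
    return TaskInfo(task_id, "success", 100)

def wait_for_task(task_id: str, poll_interval: float = 0.5) -> float:
    # Poll until the task reaches a terminal state, logging progress the way
    # _poll_task does, and return the elapsed time as duration_secs.
    start = time.monotonic()
    tick = 0
    while True:
        info = fetch_task_info(task_id, tick)
        if info.state in ("queued", "running"):
            print(f"Task: {task_id} progress is {info.progress}%.")
            time.sleep(poll_interval)
            tick += 1
            continue
        duration = time.monotonic() - start
        if info.state == "success":
            print(f"Task: {task_id} completed successfully. duration_secs={duration:.3f}")
            return duration
        raise RuntimeError(f"Task {task_id} failed: {info.error}")

if __name__ == "__main__":
    wait_for_task("task-1395941")

The loop mirrors what the log shows for task-1395941: several "progress is N%" records followed by a single "completed successfully" record with the measured duration.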
[ 1315.144381] env[61978]: DEBUG nova.compute.manager [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1315.145267] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa65925b-ca72-4c98-9319-daf928908416 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.217073] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395944, 'name': ReconfigVM_Task, 'duration_secs': 0.445738} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.217406] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Reconfigured VM instance instance-00000069 to attach disk [datastore1] a5b3f628-edc6-4d30-a179-ffc755f940f7/a5b3f628-edc6-4d30-a179-ffc755f940f7.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1315.218072] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-10b3807d-07f2-40ef-af48-473052ff4b66 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.224389] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for the task: (returnval){ [ 1315.224389] env[61978]: value = "task-1395945" [ 1315.224389] env[61978]: _type = "Task" [ 1315.224389] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.232638] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395945, 'name': Rename_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.502229] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1315.534362] env[61978]: DEBUG nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:05Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1315.534753] env[61978]: DEBUG nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1315.534987] env[61978]: DEBUG nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1315.535263] env[61978]: DEBUG nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1315.535503] env[61978]: DEBUG nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1315.535741] env[61978]: DEBUG nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1315.536067] env[61978]: DEBUG nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1315.536340] env[61978]: DEBUG nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1315.536613] env[61978]: DEBUG nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1315.536880] env[61978]: DEBUG nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1315.537202] env[61978]: DEBUG nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1315.545258] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b1eb097-367b-4acf-80d6-10ef101c7f68 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.567115] env[61978]: DEBUG oslo_vmware.api [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1315.567115] env[61978]: value = "task-1395947" [ 1315.567115] env[61978]: _type = "Task" [ 1315.567115] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.576123] env[61978]: DEBUG oslo_vmware.api [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395947, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.662525] env[61978]: INFO nova.compute.manager [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Took 15.70 seconds to build instance. [ 1315.734737] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395945, 'name': Rename_Task, 'duration_secs': 0.128754} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.735056] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1315.735312] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9e33a6d7-ee9e-4062-98b4-05e13978394a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.744837] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for the task: (returnval){ [ 1315.744837] env[61978]: value = "task-1395948" [ 1315.744837] env[61978]: _type = "Task" [ 1315.744837] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.748904] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395948, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.008044] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1316.008322] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.269s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.008627] env[61978]: DEBUG oslo_concurrency.lockutils [None req-35344e2b-4bb2-4f4c-9d28-1a5dc4951f60 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.949s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1316.077148] env[61978]: DEBUG oslo_vmware.api [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395947, 'name': ReconfigVM_Task, 'duration_secs': 0.12925} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.077428] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Updating instance '48d05337-7018-4dc2-a6a4-dd80ad3c4eb1' progress to 33 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1316.163866] env[61978]: DEBUG oslo_concurrency.lockutils [None req-05b74b8e-50bb-4ece-a693-6b88ba9f6de1 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "b4541d84-b4c3-4441-b5a7-90de2dac3562" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.210s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.251463] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395948, 'name': PowerOnVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.583909] env[61978]: DEBUG nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1316.584270] env[61978]: DEBUG nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1316.584331] env[61978]: DEBUG nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1316.584518] env[61978]: DEBUG nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1316.584673] env[61978]: DEBUG nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1316.584825] env[61978]: DEBUG 
nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1316.585043] env[61978]: DEBUG nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1316.585215] env[61978]: DEBUG nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1316.585386] env[61978]: DEBUG nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1316.585551] env[61978]: DEBUG nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1316.585726] env[61978]: DEBUG nova.virt.hardware [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1316.591086] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Reconfiguring VM instance instance-00000065 to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1316.591517] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8358624f-c767-4559-8f55-4e9b984b1533 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.611744] env[61978]: DEBUG oslo_vmware.api [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1316.611744] env[61978]: value = "task-1395949" [ 1316.611744] env[61978]: _type = "Task" [ 1316.611744] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.620889] env[61978]: DEBUG oslo_vmware.api [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395949, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.682488] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f27c8064-2668-44aa-aeda-8852f4395785 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.690146] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67426754-c2fb-465b-811a-0b5d250f2496 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.720106] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a388ad9-9719-4e7b-b14d-d1f76e03aed4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.727308] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ba8f07-080e-4af8-b0d3-03cda94da26c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.741838] env[61978]: DEBUG nova.compute.provider_tree [None req-35344e2b-4bb2-4f4c-9d28-1a5dc4951f60 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1316.750501] env[61978]: DEBUG oslo_vmware.api [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395948, 'name': PowerOnVM_Task, 'duration_secs': 0.583347} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.751370] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1316.751609] env[61978]: INFO nova.compute.manager [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Took 9.74 seconds to spawn the instance on the hypervisor. [ 1316.751806] env[61978]: DEBUG nova.compute.manager [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1316.752588] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcad8cbb-d260-4a0f-a64b-b8ce0d024939 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.124034] env[61978]: DEBUG oslo_vmware.api [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395949, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.247690] env[61978]: DEBUG nova.scheduler.client.report [None req-35344e2b-4bb2-4f4c-9d28-1a5dc4951f60 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1317.271287] env[61978]: DEBUG nova.compute.manager [req-93c4e991-cf36-4ed5-b247-8074047ab769 req-63242de3-6d4a-4227-bbbc-1690f04f036c service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Received event network-changed-377707c6-c569-41b4-b460-d4ffd83a8c03 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1317.271287] env[61978]: DEBUG nova.compute.manager [req-93c4e991-cf36-4ed5-b247-8074047ab769 req-63242de3-6d4a-4227-bbbc-1690f04f036c service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Refreshing instance network info cache due to event network-changed-377707c6-c569-41b4-b460-d4ffd83a8c03. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1317.271287] env[61978]: DEBUG oslo_concurrency.lockutils [req-93c4e991-cf36-4ed5-b247-8074047ab769 req-63242de3-6d4a-4227-bbbc-1690f04f036c service nova] Acquiring lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1317.271287] env[61978]: DEBUG oslo_concurrency.lockutils [req-93c4e991-cf36-4ed5-b247-8074047ab769 req-63242de3-6d4a-4227-bbbc-1690f04f036c service nova] Acquired lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1317.271287] env[61978]: DEBUG nova.network.neutron [req-93c4e991-cf36-4ed5-b247-8074047ab769 req-63242de3-6d4a-4227-bbbc-1690f04f036c service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Refreshing network info cache for port 377707c6-c569-41b4-b460-d4ffd83a8c03 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1317.275551] env[61978]: INFO nova.compute.manager [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Took 16.10 seconds to build instance. [ 1317.624831] env[61978]: DEBUG oslo_vmware.api [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395949, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.775357] env[61978]: DEBUG oslo_concurrency.lockutils [None req-2ed7fbc8-265d-4dc0-bfef-4819b5675978 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "a5b3f628-edc6-4d30-a179-ffc755f940f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.611s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1318.003446] env[61978]: DEBUG nova.network.neutron [req-93c4e991-cf36-4ed5-b247-8074047ab769 req-63242de3-6d4a-4227-bbbc-1690f04f036c service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Updated VIF entry in instance network info cache for port 377707c6-c569-41b4-b460-d4ffd83a8c03. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1318.003910] env[61978]: DEBUG nova.network.neutron [req-93c4e991-cf36-4ed5-b247-8074047ab769 req-63242de3-6d4a-4227-bbbc-1690f04f036c service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Updating instance_info_cache with network_info: [{"id": "377707c6-c569-41b4-b460-d4ffd83a8c03", "address": "fa:16:3e:38:98:1d", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap377707c6-c5", "ovs_interfaceid": "377707c6-c569-41b4-b460-d4ffd83a8c03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1318.125700] env[61978]: DEBUG oslo_vmware.api [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395949, 'name': ReconfigVM_Task, 'duration_secs': 1.207957} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.125700] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Reconfigured VM instance instance-00000065 to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1318.126535] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31aeca43-5ffe-4b7e-833d-22fe1ada67fb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.153130] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1/48d05337-7018-4dc2-a6a4-dd80ad3c4eb1.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1318.153985] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f73657d-71e9-4683-8af6-448a3b22a867 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.173651] env[61978]: DEBUG oslo_vmware.api [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1318.173651] env[61978]: value = "task-1395950" [ 1318.173651] env[61978]: _type = "Task" [ 1318.173651] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.183994] env[61978]: DEBUG oslo_vmware.api [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395950, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.259858] env[61978]: DEBUG oslo_concurrency.lockutils [None req-35344e2b-4bb2-4f4c-9d28-1a5dc4951f60 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.251s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1318.506762] env[61978]: DEBUG oslo_concurrency.lockutils [req-93c4e991-cf36-4ed5-b247-8074047ab769 req-63242de3-6d4a-4227-bbbc-1690f04f036c service nova] Releasing lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1318.666730] env[61978]: DEBUG oslo_concurrency.lockutils [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Acquiring lock "db960922-12b5-41e7-9de3-312136819bb0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1318.667082] env[61978]: DEBUG oslo_concurrency.lockutils [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Lock "db960922-12b5-41e7-9de3-312136819bb0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1318.684297] env[61978]: DEBUG oslo_vmware.api [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395950, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.822299] env[61978]: INFO nova.scheduler.client.report [None req-35344e2b-4bb2-4f4c-9d28-1a5dc4951f60 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Deleted allocation for migration 8f4c2454-40e4-4e41-87c6-8f2423d14fa4 [ 1319.028535] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c928f049-aa5f-4e23-9976-4a7de55150ae tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquiring lock "a5b3f628-edc6-4d30-a179-ffc755f940f7" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.028535] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c928f049-aa5f-4e23-9976-4a7de55150ae tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "a5b3f628-edc6-4d30-a179-ffc755f940f7" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.028535] env[61978]: INFO nova.compute.manager [None req-c928f049-aa5f-4e23-9976-4a7de55150ae tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Rebooting instance [ 1319.169674] env[61978]: DEBUG nova.compute.manager [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1319.185541] env[61978]: DEBUG oslo_vmware.api [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395950, 'name': ReconfigVM_Task, 'duration_secs': 0.832362} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.185874] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Reconfigured VM instance instance-00000065 to attach disk [datastore2] 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1/48d05337-7018-4dc2-a6a4-dd80ad3c4eb1.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1319.186225] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Updating instance '48d05337-7018-4dc2-a6a4-dd80ad3c4eb1' progress to 50 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1319.302312] env[61978]: DEBUG nova.compute.manager [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Received event network-changed-7cefaef7-7dfd-4081-8872-bbdb8d201973 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1319.302611] env[61978]: DEBUG nova.compute.manager [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Refreshing instance network info cache due to event network-changed-7cefaef7-7dfd-4081-8872-bbdb8d201973. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1319.302927] env[61978]: DEBUG oslo_concurrency.lockutils [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] Acquiring lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1319.303160] env[61978]: DEBUG oslo_concurrency.lockutils [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] Acquired lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1319.303400] env[61978]: DEBUG nova.network.neutron [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Refreshing network info cache for port 7cefaef7-7dfd-4081-8872-bbdb8d201973 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1319.329839] env[61978]: DEBUG oslo_concurrency.lockutils [None req-35344e2b-4bb2-4f4c-9d28-1a5dc4951f60 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.702s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.544222] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c928f049-aa5f-4e23-9976-4a7de55150ae tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquiring lock "refresh_cache-a5b3f628-edc6-4d30-a179-ffc755f940f7" 
{{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1319.544434] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c928f049-aa5f-4e23-9976-4a7de55150ae tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquired lock "refresh_cache-a5b3f628-edc6-4d30-a179-ffc755f940f7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1319.544616] env[61978]: DEBUG nova.network.neutron [None req-c928f049-aa5f-4e23-9976-4a7de55150ae tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1319.618905] env[61978]: INFO nova.compute.manager [None req-a63c982f-31a4-494f-a2bd-dbfb5ce467d7 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Get console output [ 1319.619171] env[61978]: WARNING nova.virt.vmwareapi.driver [None req-a63c982f-31a4-494f-a2bd-dbfb5ce467d7 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] The console log is missing. Check your VSPC configuration [ 1319.689899] env[61978]: DEBUG oslo_concurrency.lockutils [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.690164] env[61978]: DEBUG oslo_concurrency.lockutils [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.691693] env[61978]: INFO nova.compute.claims [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1319.696563] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03bcc1c5-a1b9-498d-a2e5-33e7bc602469 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.715541] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd9effc-51cb-4cb5-b0a5-4ac910973ebe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.733721] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Updating instance '48d05337-7018-4dc2-a6a4-dd80ad3c4eb1' progress to 67 {{(pid=61978) _update_instance_progress 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1319.999570] env[61978]: DEBUG nova.network.neutron [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Updated VIF entry in instance network info cache for port 7cefaef7-7dfd-4081-8872-bbdb8d201973. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1319.999769] env[61978]: DEBUG nova.network.neutron [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Updating instance_info_cache with network_info: [{"id": "7cefaef7-7dfd-4081-8872-bbdb8d201973", "address": "fa:16:3e:40:2b:80", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cefaef7-7d", "ovs_interfaceid": "7cefaef7-7dfd-4081-8872-bbdb8d201973", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1320.245228] env[61978]: DEBUG nova.network.neutron [None req-c928f049-aa5f-4e23-9976-4a7de55150ae tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Updating instance_info_cache with network_info: [{"id": "90e06aab-ecdf-42fb-b192-54ed2dd034ea", "address": "fa:16:3e:e8:31:3b", "network": {"id": "cf176714-7dd5-4d40-8f6d-7e46444187ca", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1909576930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4975f37c081466ab85cf1c21b750c10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90e06aab-ec", "ovs_interfaceid": "90e06aab-ecdf-42fb-b192-54ed2dd034ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1320.275455] 
env[61978]: DEBUG nova.network.neutron [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Port 9306ddf1-11e6-4d9d-8cda-d4f9bf78420f binding to destination host cpu-1 is already ACTIVE {{(pid=61978) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1320.502894] env[61978]: DEBUG oslo_concurrency.lockutils [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] Releasing lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1320.503210] env[61978]: DEBUG nova.compute.manager [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Received event network-changed-90e06aab-ecdf-42fb-b192-54ed2dd034ea {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1320.503389] env[61978]: DEBUG nova.compute.manager [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Refreshing instance network info cache due to event network-changed-90e06aab-ecdf-42fb-b192-54ed2dd034ea. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1320.503586] env[61978]: DEBUG oslo_concurrency.lockutils [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] Acquiring lock "refresh_cache-a5b3f628-edc6-4d30-a179-ffc755f940f7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1320.748613] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c928f049-aa5f-4e23-9976-4a7de55150ae tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Releasing lock "refresh_cache-a5b3f628-edc6-4d30-a179-ffc755f940f7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1320.750436] env[61978]: DEBUG oslo_concurrency.lockutils [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] Acquired lock "refresh_cache-a5b3f628-edc6-4d30-a179-ffc755f940f7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1320.750642] env[61978]: DEBUG nova.network.neutron [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Refreshing network info cache for port 90e06aab-ecdf-42fb-b192-54ed2dd034ea {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1320.751861] env[61978]: DEBUG nova.compute.manager [None req-c928f049-aa5f-4e23-9976-4a7de55150ae tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1320.752747] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1c4aa9-5b32-46f1-97c9-51d2ea4d7240 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.860262] env[61978]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f7d770-f61e-4fb7-a8ce-ae3705e66924 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.869239] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e45c1b0-b7e4-473e-9b39-d75294aac0fd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.898099] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a148d46-6cc2-47a0-ba5f-a5e47a19e535 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.904880] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3f3d54-0c3d-47be-a897-9cb7a60c2e59 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.917592] env[61978]: DEBUG nova.compute.provider_tree [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1321.298898] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "48d05337-7018-4dc2-a6a4-dd80ad3c4eb1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1321.299151] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "48d05337-7018-4dc2-a6a4-dd80ad3c4eb1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1321.299347] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "48d05337-7018-4dc2-a6a4-dd80ad3c4eb1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1321.420501] env[61978]: DEBUG nova.scheduler.client.report [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1321.639721] env[61978]: DEBUG nova.network.neutron [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Updated VIF entry in instance network info cache for port 90e06aab-ecdf-42fb-b192-54ed2dd034ea. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1321.640098] env[61978]: DEBUG nova.network.neutron [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Updating instance_info_cache with network_info: [{"id": "90e06aab-ecdf-42fb-b192-54ed2dd034ea", "address": "fa:16:3e:e8:31:3b", "network": {"id": "cf176714-7dd5-4d40-8f6d-7e46444187ca", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1909576930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4975f37c081466ab85cf1c21b750c10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90e06aab-ec", "ovs_interfaceid": "90e06aab-ecdf-42fb-b192-54ed2dd034ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1321.771217] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a81710a-cad3-4d6f-aa88-c539ea66323d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.781100] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c928f049-aa5f-4e23-9976-4a7de55150ae tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Doing hard reboot of VM {{(pid=61978) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1321.781377] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-c39ea008-2ddd-40eb-8ff7-e69657adfa06 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.787971] env[61978]: DEBUG oslo_vmware.api [None req-c928f049-aa5f-4e23-9976-4a7de55150ae tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for the task: (returnval){ [ 1321.787971] env[61978]: value = "task-1395951" [ 1321.787971] env[61978]: _type = "Task" [ 1321.787971] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.795585] env[61978]: DEBUG oslo_vmware.api [None req-c928f049-aa5f-4e23-9976-4a7de55150ae tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395951, 'name': ResetVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.925928] env[61978]: DEBUG oslo_concurrency.lockutils [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.236s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1321.926396] env[61978]: DEBUG nova.compute.manager [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1322.142527] env[61978]: DEBUG oslo_concurrency.lockutils [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] Releasing lock "refresh_cache-a5b3f628-edc6-4d30-a179-ffc755f940f7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1322.142829] env[61978]: DEBUG nova.compute.manager [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Received event network-changed-7cefaef7-7dfd-4081-8872-bbdb8d201973 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1322.143013] env[61978]: DEBUG nova.compute.manager [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Refreshing instance network info cache due to event network-changed-7cefaef7-7dfd-4081-8872-bbdb8d201973. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1322.143255] env[61978]: DEBUG oslo_concurrency.lockutils [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] Acquiring lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1322.143403] env[61978]: DEBUG oslo_concurrency.lockutils [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] Acquired lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1322.143571] env[61978]: DEBUG nova.network.neutron [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Refreshing network info cache for port 7cefaef7-7dfd-4081-8872-bbdb8d201973 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1322.300412] env[61978]: DEBUG oslo_vmware.api [None req-c928f049-aa5f-4e23-9976-4a7de55150ae tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395951, 'name': ResetVM_Task, 'duration_secs': 0.09058} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.300803] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c928f049-aa5f-4e23-9976-4a7de55150ae tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Did hard reboot of VM {{(pid=61978) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1322.301036] env[61978]: DEBUG nova.compute.manager [None req-c928f049-aa5f-4e23-9976-4a7de55150ae tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1322.302112] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61fbe1af-4114-480c-9407-c1c8e69a7a39 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.339247] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "refresh_cache-48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1322.340262] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "refresh_cache-48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1322.340262] env[61978]: DEBUG nova.network.neutron [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Building network info cache for instance {{(pid=61978) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1322.431621] env[61978]: DEBUG nova.compute.utils [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1322.432953] env[61978]: DEBUG nova.compute.manager [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1322.433152] env[61978]: DEBUG nova.network.neutron [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1322.475749] env[61978]: DEBUG nova.policy [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '017d4d40f08d46909060d9f25b23d3c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e536c7d51774c6db9c29b83651d9380', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1322.823562] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c928f049-aa5f-4e23-9976-4a7de55150ae tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "a5b3f628-edc6-4d30-a179-ffc755f940f7" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.797s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1322.856834] env[61978]: DEBUG nova.network.neutron [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Successfully created port: e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1322.936310] env[61978]: DEBUG nova.compute.manager [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1323.070500] env[61978]: DEBUG nova.network.neutron [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Updated VIF entry in instance network info cache for port 7cefaef7-7dfd-4081-8872-bbdb8d201973. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1323.074507] env[61978]: DEBUG nova.network.neutron [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Updating instance_info_cache with network_info: [{"id": "7cefaef7-7dfd-4081-8872-bbdb8d201973", "address": "fa:16:3e:40:2b:80", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cefaef7-7d", "ovs_interfaceid": "7cefaef7-7dfd-4081-8872-bbdb8d201973", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1323.243877] env[61978]: DEBUG nova.network.neutron [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Updating instance_info_cache with network_info: [{"id": "9306ddf1-11e6-4d9d-8cda-d4f9bf78420f", "address": "fa:16:3e:35:51:b2", "network": {"id": "69a878ea-ec7d-4793-9c48-f0f5d454a6fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1711420523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d95ebcafdca43b8a1636e21c7258803", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9306ddf1-11", "ovs_interfaceid": "9306ddf1-11e6-4d9d-8cda-d4f9bf78420f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1323.445118] env[61978]: INFO nova.virt.block_device [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Booting with volume 929bd504-a0b6-42c2-88ae-ee98db6decf8 at /dev/sda [ 1323.455744] env[61978]: DEBUG 
nova.compute.manager [req-676d6165-b063-4e2c-9a33-777906e7111f req-16fe02e4-3255-417e-9848-26e160876703 service nova] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Received event network-changed-90e06aab-ecdf-42fb-b192-54ed2dd034ea {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1323.455951] env[61978]: DEBUG nova.compute.manager [req-676d6165-b063-4e2c-9a33-777906e7111f req-16fe02e4-3255-417e-9848-26e160876703 service nova] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Refreshing instance network info cache due to event network-changed-90e06aab-ecdf-42fb-b192-54ed2dd034ea. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1323.456195] env[61978]: DEBUG oslo_concurrency.lockutils [req-676d6165-b063-4e2c-9a33-777906e7111f req-16fe02e4-3255-417e-9848-26e160876703 service nova] Acquiring lock "refresh_cache-a5b3f628-edc6-4d30-a179-ffc755f940f7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1323.456351] env[61978]: DEBUG oslo_concurrency.lockutils [req-676d6165-b063-4e2c-9a33-777906e7111f req-16fe02e4-3255-417e-9848-26e160876703 service nova] Acquired lock "refresh_cache-a5b3f628-edc6-4d30-a179-ffc755f940f7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.456515] env[61978]: DEBUG nova.network.neutron [req-676d6165-b063-4e2c-9a33-777906e7111f req-16fe02e4-3255-417e-9848-26e160876703 service nova] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Refreshing network info cache for port 90e06aab-ecdf-42fb-b192-54ed2dd034ea {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1323.492128] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0544eb76-70b4-412c-96ca-5ecf2dc42ce0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.501296] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9cfea2-a895-4df2-a04a-da041c3abadc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.532424] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-815cf121-c797-4345-b1c8-48bb4be938a4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.543641] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b78619-af4d-4a90-80eb-73617ed93a9e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.577770] env[61978]: DEBUG oslo_concurrency.lockutils [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] Releasing lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1323.577924] env[61978]: DEBUG nova.compute.manager [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Received event network-changed-377707c6-c569-41b4-b460-d4ffd83a8c03 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1323.578081] env[61978]: DEBUG nova.compute.manager 
[req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Refreshing instance network info cache due to event network-changed-377707c6-c569-41b4-b460-d4ffd83a8c03. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1323.578407] env[61978]: DEBUG oslo_concurrency.lockutils [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] Acquiring lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1323.578640] env[61978]: DEBUG oslo_concurrency.lockutils [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] Acquired lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.578896] env[61978]: DEBUG nova.network.neutron [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Refreshing network info cache for port 377707c6-c569-41b4-b460-d4ffd83a8c03 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1323.601032] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50087179-e64d-43dd-87c3-61e502b7db29 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.610573] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-689b6f7a-8fd2-49c0-b916-00606238d403 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.627429] env[61978]: DEBUG nova.virt.block_device [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Updating existing volume attachment record: a1f02921-40b3-4057-8afe-803824a8e4ed {{(pid=61978) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1323.746406] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "refresh_cache-48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1323.825097] env[61978]: DEBUG nova.network.neutron [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Updated VIF entry in instance network info cache for port 377707c6-c569-41b4-b460-d4ffd83a8c03. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1323.825670] env[61978]: DEBUG nova.network.neutron [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Updating instance_info_cache with network_info: [{"id": "377707c6-c569-41b4-b460-d4ffd83a8c03", "address": "fa:16:3e:38:98:1d", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap377707c6-c5", "ovs_interfaceid": "377707c6-c569-41b4-b460-d4ffd83a8c03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1324.068046] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquiring lock "a5b3f628-edc6-4d30-a179-ffc755f940f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1324.068303] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "a5b3f628-edc6-4d30-a179-ffc755f940f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1324.068520] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquiring lock "a5b3f628-edc6-4d30-a179-ffc755f940f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1324.068708] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "a5b3f628-edc6-4d30-a179-ffc755f940f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1324.068882] 
env[61978]: DEBUG oslo_concurrency.lockutils [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "a5b3f628-edc6-4d30-a179-ffc755f940f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1324.072860] env[61978]: INFO nova.compute.manager [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Terminating instance [ 1324.074663] env[61978]: DEBUG nova.compute.manager [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1324.074830] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1324.075766] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da702a3c-dc18-41ee-a5b6-60ecae4a78de {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.086097] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1324.086415] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3873105e-f228-4c8c-9d5c-ccec35f64a92 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.093028] env[61978]: DEBUG oslo_vmware.api [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for the task: (returnval){ [ 1324.093028] env[61978]: value = "task-1395952" [ 1324.093028] env[61978]: _type = "Task" [ 1324.093028] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.101077] env[61978]: DEBUG oslo_vmware.api [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395952, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.177783] env[61978]: DEBUG nova.network.neutron [req-676d6165-b063-4e2c-9a33-777906e7111f req-16fe02e4-3255-417e-9848-26e160876703 service nova] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Updated VIF entry in instance network info cache for port 90e06aab-ecdf-42fb-b192-54ed2dd034ea. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1324.178205] env[61978]: DEBUG nova.network.neutron [req-676d6165-b063-4e2c-9a33-777906e7111f req-16fe02e4-3255-417e-9848-26e160876703 service nova] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Updating instance_info_cache with network_info: [{"id": "90e06aab-ecdf-42fb-b192-54ed2dd034ea", "address": "fa:16:3e:e8:31:3b", "network": {"id": "cf176714-7dd5-4d40-8f6d-7e46444187ca", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1909576930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4975f37c081466ab85cf1c21b750c10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90e06aab-ec", "ovs_interfaceid": "90e06aab-ecdf-42fb-b192-54ed2dd034ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1324.272297] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4fa818-82b8-4a58-905a-d51da3a0659b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.291202] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab91a71-1927-4186-b725-77ad9ca02111 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.298783] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Updating instance '48d05337-7018-4dc2-a6a4-dd80ad3c4eb1' progress to 83 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1324.328671] env[61978]: DEBUG oslo_concurrency.lockutils [req-43f7a9d9-3a81-4c0c-8f6a-dc2816598fb8 req-5944a416-6423-47c2-b502-542b5e93e3fd service nova] Releasing lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1324.455390] env[61978]: DEBUG nova.compute.manager [req-671186eb-0f24-414a-aaca-c4a45d4574af req-b9a44be6-b618-49c6-a8d5-ddfad736fa7b service nova] [instance: db960922-12b5-41e7-9de3-312136819bb0] Received event network-vif-plugged-e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1324.456985] env[61978]: DEBUG oslo_concurrency.lockutils [req-671186eb-0f24-414a-aaca-c4a45d4574af req-b9a44be6-b618-49c6-a8d5-ddfad736fa7b service nova] Acquiring lock "db960922-12b5-41e7-9de3-312136819bb0-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1324.456985] env[61978]: DEBUG oslo_concurrency.lockutils [req-671186eb-0f24-414a-aaca-c4a45d4574af req-b9a44be6-b618-49c6-a8d5-ddfad736fa7b service nova] Lock "db960922-12b5-41e7-9de3-312136819bb0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1324.456985] env[61978]: DEBUG oslo_concurrency.lockutils [req-671186eb-0f24-414a-aaca-c4a45d4574af req-b9a44be6-b618-49c6-a8d5-ddfad736fa7b service nova] Lock "db960922-12b5-41e7-9de3-312136819bb0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1324.456985] env[61978]: DEBUG nova.compute.manager [req-671186eb-0f24-414a-aaca-c4a45d4574af req-b9a44be6-b618-49c6-a8d5-ddfad736fa7b service nova] [instance: db960922-12b5-41e7-9de3-312136819bb0] No waiting events found dispatching network-vif-plugged-e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1324.456985] env[61978]: WARNING nova.compute.manager [req-671186eb-0f24-414a-aaca-c4a45d4574af req-b9a44be6-b618-49c6-a8d5-ddfad736fa7b service nova] [instance: db960922-12b5-41e7-9de3-312136819bb0] Received unexpected event network-vif-plugged-e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b for instance with vm_state building and task_state block_device_mapping. [ 1324.542407] env[61978]: DEBUG nova.network.neutron [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Successfully updated port: e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1324.602632] env[61978]: DEBUG oslo_vmware.api [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395952, 'name': PowerOffVM_Task, 'duration_secs': 0.189054} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.602632] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1324.602632] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1324.602873] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-920353b7-eb6f-417c-bf9a-f3eae5c92066 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.667857] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1324.668161] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1324.668364] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Deleting the datastore file [datastore1] a5b3f628-edc6-4d30-a179-ffc755f940f7 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1324.668633] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f314ad71-14d4-4458-872d-596582ce5610 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.675853] env[61978]: DEBUG oslo_vmware.api [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for the task: (returnval){ [ 1324.675853] env[61978]: value = "task-1395954" [ 1324.675853] env[61978]: _type = "Task" [ 1324.675853] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.680700] env[61978]: DEBUG oslo_concurrency.lockutils [req-676d6165-b063-4e2c-9a33-777906e7111f req-16fe02e4-3255-417e-9848-26e160876703 service nova] Releasing lock "refresh_cache-a5b3f628-edc6-4d30-a179-ffc755f940f7" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1324.683932] env[61978]: DEBUG oslo_vmware.api [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395954, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.804912] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1324.805259] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aad67c0e-76f7-42cb-bfc7-7cbfdbd855db {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.813066] env[61978]: DEBUG oslo_vmware.api [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1324.813066] env[61978]: value = "task-1395955" [ 1324.813066] env[61978]: _type = "Task" [ 1324.813066] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.820233] env[61978]: DEBUG oslo_vmware.api [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395955, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.044866] env[61978]: DEBUG oslo_concurrency.lockutils [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Acquiring lock "refresh_cache-db960922-12b5-41e7-9de3-312136819bb0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1325.045251] env[61978]: DEBUG oslo_concurrency.lockutils [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Acquired lock "refresh_cache-db960922-12b5-41e7-9de3-312136819bb0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1325.045251] env[61978]: DEBUG nova.network.neutron [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1325.187620] env[61978]: DEBUG oslo_vmware.api [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395954, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129195} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.187821] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1325.187998] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1325.188235] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1325.188479] env[61978]: INFO nova.compute.manager [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1325.188741] env[61978]: DEBUG oslo.service.loopingcall [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1325.188943] env[61978]: DEBUG nova.compute.manager [-] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1325.189051] env[61978]: DEBUG nova.network.neutron [-] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1325.323617] env[61978]: DEBUG oslo_vmware.api [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395955, 'name': PowerOnVM_Task, 'duration_secs': 0.43979} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.323933] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1325.324141] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e8a089c8-1afb-4a82-a07e-1d9522607428 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Updating instance '48d05337-7018-4dc2-a6a4-dd80ad3c4eb1' progress to 100 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1325.576676] env[61978]: DEBUG nova.network.neutron [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1325.698608] env[61978]: DEBUG nova.network.neutron [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Updating instance_info_cache with network_info: [{"id": "e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b", "address": "fa:16:3e:2c:31:04", "network": {"id": "30fc4fd3-be4b-4dc2-a53a-1aa4068b0173", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1566125213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e536c7d51774c6db9c29b83651d9380", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape15a01e2-5d", "ovs_interfaceid": "e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1325.713349] env[61978]: DEBUG nova.compute.manager [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1325.713893] env[61978]: DEBUG nova.virt.hardware [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1325.714122] env[61978]: DEBUG nova.virt.hardware [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1325.714292] env[61978]: DEBUG nova.virt.hardware [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1325.714480] env[61978]: DEBUG nova.virt.hardware [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1325.714632] env[61978]: DEBUG nova.virt.hardware [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1325.714784] env[61978]: DEBUG nova.virt.hardware [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1325.715083] env[61978]: DEBUG nova.virt.hardware [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1325.715301] env[61978]: DEBUG nova.virt.hardware [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1325.715485] env[61978]: DEBUG nova.virt.hardware [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 
tempest-ServerActionsV293TestJSON-1939665796-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1325.715651] env[61978]: DEBUG nova.virt.hardware [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1325.715832] env[61978]: DEBUG nova.virt.hardware [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1325.717239] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50352928-5378-4bfc-a967-1065bc63819e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.725836] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc1a797-d327-4d92-aed6-f0f2fe72cf81 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.923253] env[61978]: DEBUG nova.network.neutron [-] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1326.201098] env[61978]: DEBUG oslo_concurrency.lockutils [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Releasing lock "refresh_cache-db960922-12b5-41e7-9de3-312136819bb0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1326.201349] env[61978]: DEBUG nova.compute.manager [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Instance network_info: |[{"id": "e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b", "address": "fa:16:3e:2c:31:04", "network": {"id": "30fc4fd3-be4b-4dc2-a53a-1aa4068b0173", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1566125213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e536c7d51774c6db9c29b83651d9380", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape15a01e2-5d", "ovs_interfaceid": "e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1326.201773] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2c:31:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8039f411-8c97-48fe-a5a9-9f5a42e4e7c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1326.209377] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Creating folder: Project (3e536c7d51774c6db9c29b83651d9380). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1326.209677] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24a9302b-781f-4098-9d36-b865921720d6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.223011] env[61978]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1326.223200] env[61978]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=61978) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1326.223536] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Folder already exists: Project (3e536c7d51774c6db9c29b83651d9380). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1326.223754] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Creating folder: Instances. Parent ref: group-v296034. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1326.223991] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef62cf56-b2bd-41ec-b7db-6f7287af0860 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.233213] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Created folder: Instances in parent group-v296034. [ 1326.233449] env[61978]: DEBUG oslo.service.loopingcall [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1326.233644] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db960922-12b5-41e7-9de3-312136819bb0] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1326.233852] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91f7c9f3-e484-431b-9393-03655d3b38e1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.251953] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1326.251953] env[61978]: value = "task-1395958" [ 1326.251953] env[61978]: _type = "Task" [ 1326.251953] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.259432] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395958, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.425506] env[61978]: INFO nova.compute.manager [-] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Took 1.24 seconds to deallocate network for instance. [ 1326.484128] env[61978]: DEBUG nova.compute.manager [req-2ca9313a-fab5-49a3-9afc-1b432f8b4226 req-691db31e-93b9-46e3-9948-15f55a85436d service nova] [instance: db960922-12b5-41e7-9de3-312136819bb0] Received event network-changed-e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1326.484294] env[61978]: DEBUG nova.compute.manager [req-2ca9313a-fab5-49a3-9afc-1b432f8b4226 req-691db31e-93b9-46e3-9948-15f55a85436d service nova] [instance: db960922-12b5-41e7-9de3-312136819bb0] Refreshing instance network info cache due to event network-changed-e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1326.484515] env[61978]: DEBUG oslo_concurrency.lockutils [req-2ca9313a-fab5-49a3-9afc-1b432f8b4226 req-691db31e-93b9-46e3-9948-15f55a85436d service nova] Acquiring lock "refresh_cache-db960922-12b5-41e7-9de3-312136819bb0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1326.484678] env[61978]: DEBUG oslo_concurrency.lockutils [req-2ca9313a-fab5-49a3-9afc-1b432f8b4226 req-691db31e-93b9-46e3-9948-15f55a85436d service nova] Acquired lock "refresh_cache-db960922-12b5-41e7-9de3-312136819bb0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.484850] env[61978]: DEBUG nova.network.neutron [req-2ca9313a-fab5-49a3-9afc-1b432f8b4226 req-691db31e-93b9-46e3-9948-15f55a85436d service nova] [instance: db960922-12b5-41e7-9de3-312136819bb0] Refreshing network info cache for port e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1326.762358] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395958, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.933219] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.933522] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.933763] env[61978]: DEBUG nova.objects.instance [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lazy-loading 'resources' on Instance uuid a5b3f628-edc6-4d30-a179-ffc755f940f7 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1327.189988] env[61978]: DEBUG nova.network.neutron [req-2ca9313a-fab5-49a3-9afc-1b432f8b4226 req-691db31e-93b9-46e3-9948-15f55a85436d service nova] [instance: db960922-12b5-41e7-9de3-312136819bb0] Updated VIF entry in instance network info cache for port e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1327.190451] env[61978]: DEBUG nova.network.neutron [req-2ca9313a-fab5-49a3-9afc-1b432f8b4226 req-691db31e-93b9-46e3-9948-15f55a85436d service nova] [instance: db960922-12b5-41e7-9de3-312136819bb0] Updating instance_info_cache with network_info: [{"id": "e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b", "address": "fa:16:3e:2c:31:04", "network": {"id": "30fc4fd3-be4b-4dc2-a53a-1aa4068b0173", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1566125213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e536c7d51774c6db9c29b83651d9380", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape15a01e2-5d", "ovs_interfaceid": "e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1327.263748] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395958, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.605814] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bdbbdd1-1988-4b25-8742-2b49fe70f194 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.614058] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ea2ec6-967b-46a3-967e-f1a2cd02dc77 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.645674] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e711eef-c265-4060-bb97-45f4735ef61a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.654248] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92cee1fa-0569-40c7-8e2d-d5ce9a01bcdf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.668687] env[61978]: DEBUG nova.compute.provider_tree [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1327.693286] env[61978]: DEBUG oslo_concurrency.lockutils [req-2ca9313a-fab5-49a3-9afc-1b432f8b4226 req-691db31e-93b9-46e3-9948-15f55a85436d service nova] Releasing lock "refresh_cache-db960922-12b5-41e7-9de3-312136819bb0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1327.693599] env[61978]: DEBUG nova.compute.manager [req-2ca9313a-fab5-49a3-9afc-1b432f8b4226 req-691db31e-93b9-46e3-9948-15f55a85436d service nova] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Received event network-vif-deleted-90e06aab-ecdf-42fb-b192-54ed2dd034ea {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1327.765287] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395958, 'name': CreateVM_Task, 'duration_secs': 1.118877} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.765479] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db960922-12b5-41e7-9de3-312136819bb0] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1327.766145] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sda', 'disk_bus': None, 'device_type': None, 'attachment_id': 'a1f02921-40b3-4057-8afe-803824a8e4ed', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296041', 'volume_id': '929bd504-a0b6-42c2-88ae-ee98db6decf8', 'name': 'volume-929bd504-a0b6-42c2-88ae-ee98db6decf8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'db960922-12b5-41e7-9de3-312136819bb0', 'attached_at': '', 'detached_at': '', 'volume_id': '929bd504-a0b6-42c2-88ae-ee98db6decf8', 'serial': '929bd504-a0b6-42c2-88ae-ee98db6decf8'}, 'boot_index': 0, 'guest_format': None, 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=61978) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1327.766394] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Root volume attach. Driver type: vmdk {{(pid=61978) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1327.767230] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32aaeba-ee4f-4690-99b0-dc11f409ed34 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.774444] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da202c58-3315-42f7-8181-3486d86d077b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.780472] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a63e3c-caa8-4822-8585-50e6e13df4e0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.785917] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-6265f5f7-1b22-4153-a407-a50359d9fc82 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.792975] env[61978]: DEBUG oslo_vmware.api [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Waiting for the task: (returnval){ [ 1327.792975] env[61978]: value = "task-1395959" [ 1327.792975] env[61978]: _type = "Task" [ 1327.792975] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.800820] env[61978]: DEBUG oslo_vmware.api [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1395959, 'name': RelocateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.081419] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6659cc1-31c1-4bc0-aadf-53a6814a8b56 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.081730] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6659cc1-31c1-4bc0-aadf-53a6814a8b56 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1328.081979] env[61978]: DEBUG nova.compute.manager [None req-d6659cc1-31c1-4bc0-aadf-53a6814a8b56 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Going to confirm migration 9 {{(pid=61978) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1328.172369] env[61978]: DEBUG nova.scheduler.client.report [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1328.303950] env[61978]: DEBUG oslo_vmware.api [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1395959, 'name': RelocateVM_Task} progress is 43%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.649362] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6659cc1-31c1-4bc0-aadf-53a6814a8b56 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "refresh_cache-48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1328.649603] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6659cc1-31c1-4bc0-aadf-53a6814a8b56 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "refresh_cache-48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.649794] env[61978]: DEBUG nova.network.neutron [None req-d6659cc1-31c1-4bc0-aadf-53a6814a8b56 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1328.649990] env[61978]: DEBUG nova.objects.instance [None req-d6659cc1-31c1-4bc0-aadf-53a6814a8b56 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lazy-loading 'info_cache' on Instance uuid 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1328.677910] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.744s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1328.704342] env[61978]: INFO nova.scheduler.client.report [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Deleted allocations for instance a5b3f628-edc6-4d30-a179-ffc755f940f7 [ 1328.804233] env[61978]: DEBUG oslo_vmware.api [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1395959, 'name': RelocateVM_Task} progress is 56%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.213135] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ddc611ae-a7e5-4b49-a125-6c2c90345872 tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "a5b3f628-edc6-4d30-a179-ffc755f940f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.145s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1329.305560] env[61978]: DEBUG oslo_vmware.api [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1395959, 'name': RelocateVM_Task} progress is 71%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.805515] env[61978]: DEBUG oslo_vmware.api [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1395959, 'name': RelocateVM_Task} progress is 84%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.918724] env[61978]: DEBUG nova.network.neutron [None req-d6659cc1-31c1-4bc0-aadf-53a6814a8b56 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Updating instance_info_cache with network_info: [{"id": "9306ddf1-11e6-4d9d-8cda-d4f9bf78420f", "address": "fa:16:3e:35:51:b2", "network": {"id": "69a878ea-ec7d-4793-9c48-f0f5d454a6fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1711420523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d95ebcafdca43b8a1636e21c7258803", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9306ddf1-11", "ovs_interfaceid": "9306ddf1-11e6-4d9d-8cda-d4f9bf78420f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1330.305522] env[61978]: DEBUG oslo_vmware.api [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1395959, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.369369] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquiring lock "764fdf3c-a6ce-4cd6-9190-d2d43fded0fa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.369748] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "764fdf3c-a6ce-4cd6-9190-d2d43fded0fa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1330.369868] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquiring lock "764fdf3c-a6ce-4cd6-9190-d2d43fded0fa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.370068] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "764fdf3c-a6ce-4cd6-9190-d2d43fded0fa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1330.370251] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "764fdf3c-a6ce-4cd6-9190-d2d43fded0fa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1330.372635] env[61978]: INFO nova.compute.manager [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Terminating instance [ 1330.374313] env[61978]: DEBUG nova.compute.manager [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Start destroying the instance on the hypervisor. 
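The lock traffic in these entries comes from oslo.concurrency: the per-instance and per-instance "-events" locks are taken through lockutils, whose wrappers log the acquire/wait/hold timings seen here. A hedged sketch of the two usages that produce these messages follows; the lock names below are placeholders.

# Sketch of the oslo.concurrency locking behind the "Acquiring lock ...",
# "... acquired ... :: waited" and "... released ... :: held" DEBUG lines.
# Lock names are placeholders, not ones taken from this run.
from oslo_concurrency import lockutils

@lockutils.synchronized("some-instance-uuid")
def do_terminate_instance():
    # Runs with the named semaphore held; the decorator's inner wrapper
    # emits the acquire/wait/hold log lines.
    pass

def clear_events(instance_uuid):
    # The same facility is usable as a context manager for ad-hoc locks.
    with lockutils.lock(instance_uuid + "-events"):
        pass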
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1330.374512] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1330.375332] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e4cce8-943f-4168-ae45-e193b148fc56 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.382531] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1330.382754] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ee823903-8c9e-44a8-870c-ed3b40e802c3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.389084] env[61978]: DEBUG oslo_vmware.api [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for the task: (returnval){ [ 1330.389084] env[61978]: value = "task-1395960" [ 1330.389084] env[61978]: _type = "Task" [ 1330.389084] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.395423] env[61978]: DEBUG oslo_vmware.api [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395960, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.421330] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6659cc1-31c1-4bc0-aadf-53a6814a8b56 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "refresh_cache-48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1330.421602] env[61978]: DEBUG nova.objects.instance [None req-d6659cc1-31c1-4bc0-aadf-53a6814a8b56 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lazy-loading 'migration_context' on Instance uuid 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1330.805115] env[61978]: DEBUG oslo_vmware.api [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1395959, 'name': RelocateVM_Task} progress is 98%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.898171] env[61978]: DEBUG oslo_vmware.api [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395960, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.924424] env[61978]: DEBUG nova.objects.base [None req-d6659cc1-31c1-4bc0-aadf-53a6814a8b56 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Object Instance<48d05337-7018-4dc2-a6a4-dd80ad3c4eb1> lazy-loaded attributes: info_cache,migration_context {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1330.925578] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609cdf9b-7713-4df9-ac8c-c819112bd4cb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.947447] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30ecf4fd-f196-4536-b326-ff5e77e6b931 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.953052] env[61978]: DEBUG oslo_vmware.api [None req-d6659cc1-31c1-4bc0-aadf-53a6814a8b56 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1330.953052] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]527a0fb2-b591-39c4-2fef-3c9ebbddee3e" [ 1330.953052] env[61978]: _type = "Task" [ 1330.953052] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.960947] env[61978]: DEBUG oslo_vmware.api [None req-d6659cc1-31c1-4bc0-aadf-53a6814a8b56 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]527a0fb2-b591-39c4-2fef-3c9ebbddee3e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.185980] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "interface-7823099f-efdf-46bf-85d7-69e105dfb02c-ec006995-1071-4f27-8726-d161611d8e5b" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.186248] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "interface-7823099f-efdf-46bf-85d7-69e105dfb02c-ec006995-1071-4f27-8726-d161611d8e5b" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.186625] env[61978]: DEBUG nova.objects.instance [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lazy-loading 'flavor' on Instance uuid 7823099f-efdf-46bf-85d7-69e105dfb02c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1331.306603] env[61978]: DEBUG oslo_vmware.api [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1395959, 'name': RelocateVM_Task, 'duration_secs': 3.327419} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.306893] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Volume attach. 
Driver type: vmdk {{(pid=61978) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1331.307207] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296041', 'volume_id': '929bd504-a0b6-42c2-88ae-ee98db6decf8', 'name': 'volume-929bd504-a0b6-42c2-88ae-ee98db6decf8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'db960922-12b5-41e7-9de3-312136819bb0', 'attached_at': '', 'detached_at': '', 'volume_id': '929bd504-a0b6-42c2-88ae-ee98db6decf8', 'serial': '929bd504-a0b6-42c2-88ae-ee98db6decf8'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1331.308099] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a16478-392b-4584-8582-a7d02624abae {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.325150] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca78d4f4-0fd3-492d-843a-292463a1d208 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.346878] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] volume-929bd504-a0b6-42c2-88ae-ee98db6decf8/volume-929bd504-a0b6-42c2-88ae-ee98db6decf8.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1331.347196] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8478d861-1fcb-4335-9e75-2488756aa420 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.366623] env[61978]: DEBUG oslo_vmware.api [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Waiting for the task: (returnval){ [ 1331.366623] env[61978]: value = "task-1395961" [ 1331.366623] env[61978]: _type = "Task" [ 1331.366623] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.373969] env[61978]: DEBUG oslo_vmware.api [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1395961, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.397988] env[61978]: DEBUG oslo_vmware.api [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395960, 'name': PowerOffVM_Task, 'duration_secs': 0.535117} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.398274] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1331.398458] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1331.398709] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51adf045-b976-4349-a2a0-95c33fa5e653 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.462999] env[61978]: DEBUG oslo_vmware.api [None req-d6659cc1-31c1-4bc0-aadf-53a6814a8b56 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]527a0fb2-b591-39c4-2fef-3c9ebbddee3e, 'name': SearchDatastore_Task, 'duration_secs': 0.017283} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.463349] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6659cc1-31c1-4bc0-aadf-53a6814a8b56 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.463612] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6659cc1-31c1-4bc0-aadf-53a6814a8b56 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.478472] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1331.478702] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1331.479062] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Deleting the datastore file [datastore2] 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
1331.479234] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f6ea884-02dc-490c-bd7c-38077483210c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.486749] env[61978]: DEBUG oslo_vmware.api [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for the task: (returnval){ [ 1331.486749] env[61978]: value = "task-1395963" [ 1331.486749] env[61978]: _type = "Task" [ 1331.486749] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.495607] env[61978]: DEBUG oslo_vmware.api [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395963, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.785071] env[61978]: DEBUG nova.objects.instance [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lazy-loading 'pci_requests' on Instance uuid 7823099f-efdf-46bf-85d7-69e105dfb02c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1331.877411] env[61978]: DEBUG oslo_vmware.api [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1395961, 'name': ReconfigVM_Task, 'duration_secs': 0.263845} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.877740] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Reconfigured VM instance instance-0000006a to attach disk [datastore2] volume-929bd504-a0b6-42c2-88ae-ee98db6decf8/volume-929bd504-a0b6-42c2-88ae-ee98db6decf8.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1331.882991] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a86f7cd1-b7ef-4c47-a2e7-fb4184ba1605 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.898836] env[61978]: DEBUG oslo_vmware.api [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Waiting for the task: (returnval){ [ 1331.898836] env[61978]: value = "task-1395964" [ 1331.898836] env[61978]: _type = "Task" [ 1331.898836] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.907379] env[61978]: DEBUG oslo_vmware.api [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1395964, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.998908] env[61978]: DEBUG oslo_vmware.api [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Task: {'id': task-1395963, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.437143} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.999192] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1331.999394] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1331.999580] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1331.999761] env[61978]: INFO nova.compute.manager [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1332.000022] env[61978]: DEBUG oslo.service.loopingcall [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
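Request req-8f8532aa-f912-4e1b-b2b8-3db1146078dc walks the vmwareapi destroy path for instance 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa: power the VM off, unregister it, delete its files from datastore2, then deallocate the Neutron ports. A hedged sketch of the vSphere side of that sequence, driven through an oslo.vmware session, is below; session, vm_ref, ds_path and dc_ref are placeholders obtained elsewhere.

# Hedged sketch of the destroy sequence traced above:
# PowerOffVM_Task -> UnregisterVM -> DeleteDatastoreFile_Task.
# `session` is an oslo.vmware VMwareAPISession; vm_ref, ds_path and dc_ref
# are placeholder managed-object references / datastore paths.
def destroy_vm(session, vm_ref, ds_path, dc_ref):
    # 1. Power the VM off and wait for the task.
    task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
    session.wait_for_task(task)

    # 2. Unregister the VM from the vCenter inventory (not a task).
    session.invoke_api(session.vim, "UnregisterVM", vm_ref)

    # 3. Delete the VM's directory from the datastore.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, "DeleteDatastoreFile_Task",
                              file_manager, name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)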
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1332.000246] env[61978]: DEBUG nova.compute.manager [-] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1332.000346] env[61978]: DEBUG nova.network.neutron [-] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1332.126017] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4fe6811-8ed2-4c87-9261-2cabc57e89e3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.133529] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f66ada-3584-4cdd-8350-5caf65d19221 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.169521] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b263c71-f05d-4590-bbfd-cf68f974e7b0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.178264] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb243578-b653-4a5a-b033-f70374c8d45e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.192789] env[61978]: DEBUG nova.compute.provider_tree [None req-d6659cc1-31c1-4bc0-aadf-53a6814a8b56 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1332.278324] env[61978]: DEBUG nova.compute.manager [req-01e5741c-2284-41eb-89d2-498021a88dd4 req-4bfc5499-2acc-4858-b05c-62c0a64eee09 service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Received event network-vif-deleted-b790409d-8e9e-4942-9855-0974decac463 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1332.278541] env[61978]: INFO nova.compute.manager [req-01e5741c-2284-41eb-89d2-498021a88dd4 req-4bfc5499-2acc-4858-b05c-62c0a64eee09 service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Neutron deleted interface b790409d-8e9e-4942-9855-0974decac463; detaching it from the instance and deleting it from the info cache [ 1332.278727] env[61978]: DEBUG nova.network.neutron [req-01e5741c-2284-41eb-89d2-498021a88dd4 req-4bfc5499-2acc-4858-b05c-62c0a64eee09 service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1332.287295] env[61978]: DEBUG nova.objects.base [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Object Instance<7823099f-efdf-46bf-85d7-69e105dfb02c> lazy-loaded attributes: flavor,pci_requests {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1332.287505] env[61978]: DEBUG nova.network.neutron [None req-6af12bf3-4457-469e-8a90-bf80a4717658 
tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1332.354896] env[61978]: DEBUG nova.policy [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '346f982562de48fab1702aca567113ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3a4f29a959447159b2f7d194ea94873', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1332.409259] env[61978]: DEBUG oslo_vmware.api [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1395964, 'name': ReconfigVM_Task, 'duration_secs': 0.117502} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.409759] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296041', 'volume_id': '929bd504-a0b6-42c2-88ae-ee98db6decf8', 'name': 'volume-929bd504-a0b6-42c2-88ae-ee98db6decf8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'db960922-12b5-41e7-9de3-312136819bb0', 'attached_at': '', 'detached_at': '', 'volume_id': '929bd504-a0b6-42c2-88ae-ee98db6decf8', 'serial': '929bd504-a0b6-42c2-88ae-ee98db6decf8'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1332.410307] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5eae396e-a2b3-4341-bda8-650a79473a40 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.417218] env[61978]: DEBUG oslo_vmware.api [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Waiting for the task: (returnval){ [ 1332.417218] env[61978]: value = "task-1395965" [ 1332.417218] env[61978]: _type = "Task" [ 1332.417218] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.425213] env[61978]: DEBUG oslo_vmware.api [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1395965, 'name': Rename_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.695941] env[61978]: DEBUG nova.scheduler.client.report [None req-d6659cc1-31c1-4bc0-aadf-53a6814a8b56 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1332.757813] env[61978]: DEBUG nova.network.neutron [-] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1332.782116] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7615d1cc-ae5e-4f64-8f53-f6b069a6e716 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.791257] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ba814c-2461-4c9e-a7b4-c82f32b25732 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.822070] env[61978]: DEBUG nova.compute.manager [req-01e5741c-2284-41eb-89d2-498021a88dd4 req-4bfc5499-2acc-4858-b05c-62c0a64eee09 service nova] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Detach interface failed, port_id=b790409d-8e9e-4942-9855-0974decac463, reason: Instance 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1332.926675] env[61978]: DEBUG oslo_vmware.api [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1395965, 'name': Rename_Task, 'duration_secs': 0.155255} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.926958] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1332.927298] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-55376554-0d09-454c-93a7-f4b080948f90 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.933938] env[61978]: DEBUG oslo_vmware.api [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Waiting for the task: (returnval){ [ 1332.933938] env[61978]: value = "task-1395966" [ 1332.933938] env[61978]: _type = "Task" [ 1332.933938] env[61978]: } to complete. 
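The scheduler report client keeps confirming that the inventory of provider 44209228-3464-48ae-bc40-83eccd44b0cf is unchanged. The capacity placement schedules against is derived from exactly the fields logged here, using the standard rule capacity = (total - reserved) * allocation_ratio; a tiny worked example with those numbers:

# Worked example with the inventory dict logged for provider
# 44209228-3464-48ae-bc40-83eccd44b0cf:
# schedulable capacity = (total - reserved) * allocation_ratio.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0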
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.940739] env[61978]: DEBUG oslo_vmware.api [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1395966, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.260853] env[61978]: INFO nova.compute.manager [-] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Took 1.26 seconds to deallocate network for instance. [ 1333.444237] env[61978]: DEBUG oslo_vmware.api [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1395966, 'name': PowerOnVM_Task, 'duration_secs': 0.490784} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.444569] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1333.444780] env[61978]: INFO nova.compute.manager [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Took 7.73 seconds to spawn the instance on the hypervisor. [ 1333.444969] env[61978]: DEBUG nova.compute.manager [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1333.445753] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9efe6a1d-f70b-440b-8f26-28814a57f36c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.706525] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6659cc1-31c1-4bc0-aadf-53a6814a8b56 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.243s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1333.767691] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.767967] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1333.768340] env[61978]: DEBUG nova.objects.instance [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lazy-loading 'resources' on Instance uuid 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1333.857873] env[61978]: DEBUG nova.network.neutron [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Successfully updated port: ec006995-1071-4f27-8726-d161611d8e5b {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1333.966460] env[61978]: INFO nova.compute.manager [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Took 14.29 seconds to build instance. [ 1334.273454] env[61978]: INFO nova.scheduler.client.report [None req-d6659cc1-31c1-4bc0-aadf-53a6814a8b56 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Deleted allocation for migration 25324394-3e2c-4647-9958-44e8e2ecc167 [ 1334.360857] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1334.361226] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1334.361838] env[61978]: DEBUG nova.network.neutron [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1334.417508] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0947a6c2-7db0-4207-801a-c65d5d513eb8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.425645] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd84808-b5f4-4a64-8353-d94ab591a2c8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.457179] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca9159b5-9677-4b64-8fb0-095ac2688033 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.460717] env[61978]: DEBUG nova.compute.manager [req-bb46dcf5-3489-4181-a64f-b78058c0e8f0 req-adaa06a6-1a4c-4a91-8b9e-02157e7f33d5 service nova] [instance: 
7823099f-efdf-46bf-85d7-69e105dfb02c] Received event network-vif-plugged-ec006995-1071-4f27-8726-d161611d8e5b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1334.460936] env[61978]: DEBUG oslo_concurrency.lockutils [req-bb46dcf5-3489-4181-a64f-b78058c0e8f0 req-adaa06a6-1a4c-4a91-8b9e-02157e7f33d5 service nova] Acquiring lock "7823099f-efdf-46bf-85d7-69e105dfb02c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1334.461168] env[61978]: DEBUG oslo_concurrency.lockutils [req-bb46dcf5-3489-4181-a64f-b78058c0e8f0 req-adaa06a6-1a4c-4a91-8b9e-02157e7f33d5 service nova] Lock "7823099f-efdf-46bf-85d7-69e105dfb02c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1334.461341] env[61978]: DEBUG oslo_concurrency.lockutils [req-bb46dcf5-3489-4181-a64f-b78058c0e8f0 req-adaa06a6-1a4c-4a91-8b9e-02157e7f33d5 service nova] Lock "7823099f-efdf-46bf-85d7-69e105dfb02c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.461512] env[61978]: DEBUG nova.compute.manager [req-bb46dcf5-3489-4181-a64f-b78058c0e8f0 req-adaa06a6-1a4c-4a91-8b9e-02157e7f33d5 service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] No waiting events found dispatching network-vif-plugged-ec006995-1071-4f27-8726-d161611d8e5b {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1334.461677] env[61978]: WARNING nova.compute.manager [req-bb46dcf5-3489-4181-a64f-b78058c0e8f0 req-adaa06a6-1a4c-4a91-8b9e-02157e7f33d5 service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Received unexpected event network-vif-plugged-ec006995-1071-4f27-8726-d161611d8e5b for instance with vm_state active and task_state None. [ 1334.461835] env[61978]: DEBUG nova.compute.manager [req-bb46dcf5-3489-4181-a64f-b78058c0e8f0 req-adaa06a6-1a4c-4a91-8b9e-02157e7f33d5 service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Received event network-changed-ec006995-1071-4f27-8726-d161611d8e5b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1334.461986] env[61978]: DEBUG nova.compute.manager [req-bb46dcf5-3489-4181-a64f-b78058c0e8f0 req-adaa06a6-1a4c-4a91-8b9e-02157e7f33d5 service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Refreshing instance network info cache due to event network-changed-ec006995-1071-4f27-8726-d161611d8e5b. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1334.462168] env[61978]: DEBUG oslo_concurrency.lockutils [req-bb46dcf5-3489-4181-a64f-b78058c0e8f0 req-adaa06a6-1a4c-4a91-8b9e-02157e7f33d5 service nova] Acquiring lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1334.470313] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52828ed-67ee-4125-802d-5e4024f85f71 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.472464] env[61978]: DEBUG oslo_concurrency.lockutils [None req-867dee97-6ca1-4440-b7d8-1c3108e82eb3 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Lock "db960922-12b5-41e7-9de3-312136819bb0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.805s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.483125] env[61978]: DEBUG nova.compute.provider_tree [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1334.628518] env[61978]: DEBUG nova.compute.manager [req-54f31c5f-291b-4a9f-9866-cafc5242b220 req-c206d591-c527-498c-bf18-abe2559d23fe service nova] [instance: db960922-12b5-41e7-9de3-312136819bb0] Received event network-changed-e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1334.628518] env[61978]: DEBUG nova.compute.manager [req-54f31c5f-291b-4a9f-9866-cafc5242b220 req-c206d591-c527-498c-bf18-abe2559d23fe service nova] [instance: db960922-12b5-41e7-9de3-312136819bb0] Refreshing instance network info cache due to event network-changed-e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1334.628518] env[61978]: DEBUG oslo_concurrency.lockutils [req-54f31c5f-291b-4a9f-9866-cafc5242b220 req-c206d591-c527-498c-bf18-abe2559d23fe service nova] Acquiring lock "refresh_cache-db960922-12b5-41e7-9de3-312136819bb0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1334.628666] env[61978]: DEBUG oslo_concurrency.lockutils [req-54f31c5f-291b-4a9f-9866-cafc5242b220 req-c206d591-c527-498c-bf18-abe2559d23fe service nova] Acquired lock "refresh_cache-db960922-12b5-41e7-9de3-312136819bb0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1334.628707] env[61978]: DEBUG nova.network.neutron [req-54f31c5f-291b-4a9f-9866-cafc5242b220 req-c206d591-c527-498c-bf18-abe2559d23fe service nova] [instance: db960922-12b5-41e7-9de3-312136819bb0] Refreshing network info cache for port e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1334.778774] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6659cc1-31c1-4bc0-aadf-53a6814a8b56 tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.697s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.900815] env[61978]: WARNING nova.network.neutron [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] 3b4d30cc-d9a1-4180-b6eb-881241a1c0f4 already exists in list: networks containing: ['3b4d30cc-d9a1-4180-b6eb-881241a1c0f4']. 
ignoring it [ 1334.987957] env[61978]: DEBUG nova.scheduler.client.report [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1335.279550] env[61978]: DEBUG nova.network.neutron [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Updating instance_info_cache with network_info: [{"id": "377707c6-c569-41b4-b460-d4ffd83a8c03", "address": "fa:16:3e:38:98:1d", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap377707c6-c5", "ovs_interfaceid": "377707c6-c569-41b4-b460-d4ffd83a8c03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ec006995-1071-4f27-8726-d161611d8e5b", "address": "fa:16:3e:27:98:41", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec006995-10", "ovs_interfaceid": "ec006995-1071-4f27-8726-d161611d8e5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61978) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.377610] env[61978]: DEBUG oslo_concurrency.lockutils [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.377945] env[61978]: DEBUG oslo_concurrency.lockutils [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1335.378170] env[61978]: DEBUG oslo_concurrency.lockutils [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "48d05337-7018-4dc2-a6a4-dd80ad3c4eb1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.378389] env[61978]: DEBUG oslo_concurrency.lockutils [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "48d05337-7018-4dc2-a6a4-dd80ad3c4eb1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1335.378561] env[61978]: DEBUG oslo_concurrency.lockutils [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "48d05337-7018-4dc2-a6a4-dd80ad3c4eb1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1335.380784] env[61978]: INFO nova.compute.manager [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Terminating instance [ 1335.382542] env[61978]: DEBUG nova.compute.manager [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1335.382722] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1335.383556] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5a7eb6-18ed-400a-bd3f-f85412c6eac0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.391061] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1335.391308] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ca0a38d-1d27-4948-b479-801624a0a09d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.398021] env[61978]: DEBUG oslo_vmware.api [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1335.398021] env[61978]: value = "task-1395967" [ 1335.398021] env[61978]: _type = "Task" [ 1335.398021] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.405418] env[61978]: DEBUG oslo_vmware.api [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395967, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.406221] env[61978]: DEBUG nova.network.neutron [req-54f31c5f-291b-4a9f-9866-cafc5242b220 req-c206d591-c527-498c-bf18-abe2559d23fe service nova] [instance: db960922-12b5-41e7-9de3-312136819bb0] Updated VIF entry in instance network info cache for port e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1335.406557] env[61978]: DEBUG nova.network.neutron [req-54f31c5f-291b-4a9f-9866-cafc5242b220 req-c206d591-c527-498c-bf18-abe2559d23fe service nova] [instance: db960922-12b5-41e7-9de3-312136819bb0] Updating instance_info_cache with network_info: [{"id": "e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b", "address": "fa:16:3e:2c:31:04", "network": {"id": "30fc4fd3-be4b-4dc2-a53a-1aa4068b0173", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1566125213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e536c7d51774c6db9c29b83651d9380", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape15a01e2-5d", "ovs_interfaceid": "e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.492525] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.724s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1335.519080] env[61978]: INFO nova.scheduler.client.report [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Deleted allocations for instance 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa [ 1335.782198] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1335.782877] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1335.783055] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.783348] env[61978]: DEBUG 
oslo_concurrency.lockutils [req-bb46dcf5-3489-4181-a64f-b78058c0e8f0 req-adaa06a6-1a4c-4a91-8b9e-02157e7f33d5 service nova] Acquired lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.783555] env[61978]: DEBUG nova.network.neutron [req-bb46dcf5-3489-4181-a64f-b78058c0e8f0 req-adaa06a6-1a4c-4a91-8b9e-02157e7f33d5 service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Refreshing network info cache for port ec006995-1071-4f27-8726-d161611d8e5b {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1335.785258] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620e102b-62f8-409a-94b7-1c9201c5c238 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.802363] env[61978]: DEBUG nova.virt.hardware [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1335.802661] env[61978]: DEBUG nova.virt.hardware [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1335.802836] env[61978]: DEBUG nova.virt.hardware [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1335.803037] env[61978]: DEBUG nova.virt.hardware [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1335.803230] env[61978]: DEBUG nova.virt.hardware [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1335.803347] env[61978]: DEBUG nova.virt.hardware [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1335.803591] env[61978]: DEBUG nova.virt.hardware [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 
tempest-AttachInterfacesTestJSON-408106683-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1335.803718] env[61978]: DEBUG nova.virt.hardware [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1335.803891] env[61978]: DEBUG nova.virt.hardware [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1335.804074] env[61978]: DEBUG nova.virt.hardware [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1335.804260] env[61978]: DEBUG nova.virt.hardware [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1335.811095] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Reconfiguring VM to attach interface {{(pid=61978) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1335.812134] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3d0e014-5574-44a3-8c9f-197b42d75046 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.836787] env[61978]: DEBUG oslo_concurrency.lockutils [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "001d90e9-9c22-4044-b550-d3acd778222e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.837113] env[61978]: DEBUG oslo_concurrency.lockutils [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "001d90e9-9c22-4044-b550-d3acd778222e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1335.841768] env[61978]: DEBUG oslo_vmware.api [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1335.841768] env[61978]: value = "task-1395968" [ 1335.841768] env[61978]: _type = "Task" [ 
1335.841768] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.852905] env[61978]: DEBUG oslo_vmware.api [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395968, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.908026] env[61978]: DEBUG oslo_vmware.api [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395967, 'name': PowerOffVM_Task, 'duration_secs': 0.20697} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.908533] env[61978]: DEBUG oslo_concurrency.lockutils [req-54f31c5f-291b-4a9f-9866-cafc5242b220 req-c206d591-c527-498c-bf18-abe2559d23fe service nova] Releasing lock "refresh_cache-db960922-12b5-41e7-9de3-312136819bb0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1335.908985] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1335.909203] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1335.909509] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b4b1d9b4-b281-432b-8093-58d46bbd6b62 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.969338] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1335.969594] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1335.969791] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Deleting the datastore file [datastore2] 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1335.970082] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-271b7559-400e-42c5-b2fe-c43d64a411e8 {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.976331] env[61978]: DEBUG oslo_vmware.api [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1335.976331] env[61978]: value = "task-1395970" [ 1335.976331] env[61978]: _type = "Task" [ 1335.976331] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.985262] env[61978]: DEBUG oslo_vmware.api [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395970, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.028350] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8f8532aa-f912-4e1b-b2b8-3db1146078dc tempest-SecurityGroupsTestJSON-1434142361 tempest-SecurityGroupsTestJSON-1434142361-project-member] Lock "764fdf3c-a6ce-4cd6-9190-d2d43fded0fa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.658s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1336.340148] env[61978]: DEBUG nova.compute.manager [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1336.353398] env[61978]: DEBUG oslo_vmware.api [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395968, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.486516] env[61978]: DEBUG oslo_vmware.api [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395970, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153975} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.486775] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1336.486972] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1336.487209] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1336.487419] env[61978]: INFO nova.compute.manager [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1336.487702] env[61978]: DEBUG oslo.service.loopingcall [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1336.487941] env[61978]: DEBUG nova.compute.manager [-] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1336.488081] env[61978]: DEBUG nova.network.neutron [-] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1336.533915] env[61978]: DEBUG nova.network.neutron [req-bb46dcf5-3489-4181-a64f-b78058c0e8f0 req-adaa06a6-1a4c-4a91-8b9e-02157e7f33d5 service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Updated VIF entry in instance network info cache for port ec006995-1071-4f27-8726-d161611d8e5b. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1336.534394] env[61978]: DEBUG nova.network.neutron [req-bb46dcf5-3489-4181-a64f-b78058c0e8f0 req-adaa06a6-1a4c-4a91-8b9e-02157e7f33d5 service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Updating instance_info_cache with network_info: [{"id": "377707c6-c569-41b4-b460-d4ffd83a8c03", "address": "fa:16:3e:38:98:1d", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap377707c6-c5", "ovs_interfaceid": "377707c6-c569-41b4-b460-d4ffd83a8c03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ec006995-1071-4f27-8726-d161611d8e5b", "address": "fa:16:3e:27:98:41", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec006995-10", "ovs_interfaceid": "ec006995-1071-4f27-8726-d161611d8e5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.820769] env[61978]: DEBUG nova.compute.manager [req-4a18a784-4d48-4ff0-bc67-4776487fa8cf req-81d0acba-2001-4309-b195-67c5b9af2d5b service nova] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Received event network-vif-deleted-9306ddf1-11e6-4d9d-8cda-d4f9bf78420f {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1336.821276] env[61978]: INFO nova.compute.manager [req-4a18a784-4d48-4ff0-bc67-4776487fa8cf req-81d0acba-2001-4309-b195-67c5b9af2d5b service nova] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Neutron deleted interface 9306ddf1-11e6-4d9d-8cda-d4f9bf78420f; detaching it from the instance and deleting it from the info 
cache [ 1336.821276] env[61978]: DEBUG nova.network.neutron [req-4a18a784-4d48-4ff0-bc67-4776487fa8cf req-81d0acba-2001-4309-b195-67c5b9af2d5b service nova] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.853783] env[61978]: DEBUG oslo_vmware.api [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395968, 'name': ReconfigVM_Task, 'duration_secs': 0.775111} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.854260] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1336.854716] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Reconfigured VM to attach interface {{(pid=61978) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1336.865220] env[61978]: DEBUG oslo_concurrency.lockutils [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1336.865462] env[61978]: DEBUG oslo_concurrency.lockutils [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1336.866938] env[61978]: INFO nova.compute.claims [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1337.036935] env[61978]: DEBUG oslo_concurrency.lockutils [req-bb46dcf5-3489-4181-a64f-b78058c0e8f0 req-adaa06a6-1a4c-4a91-8b9e-02157e7f33d5 service nova] Releasing lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1337.253253] env[61978]: DEBUG nova.network.neutron [-] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1337.324711] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e61c72d5-9620-4895-b5b7-2ba5ef559241 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.335592] env[61978]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7cc2b8-6ba1-42d0-b9bb-1ca4cbb214fd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.364750] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6af12bf3-4457-469e-8a90-bf80a4717658 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "interface-7823099f-efdf-46bf-85d7-69e105dfb02c-ec006995-1071-4f27-8726-d161611d8e5b" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.178s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.365900] env[61978]: DEBUG nova.compute.manager [req-4a18a784-4d48-4ff0-bc67-4776487fa8cf req-81d0acba-2001-4309-b195-67c5b9af2d5b service nova] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Detach interface failed, port_id=9306ddf1-11e6-4d9d-8cda-d4f9bf78420f, reason: Instance 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1337.755960] env[61978]: INFO nova.compute.manager [-] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Took 1.27 seconds to deallocate network for instance. [ 1338.016135] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab63ede8-8238-4a37-93f1-4c0c4c2948ad {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.027425] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f87228-f836-4021-b19c-05d2b8d797f0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.060619] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4407ee83-2cd5-4c9e-8e97-65728a0abeed {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.068636] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f7382f4-d27b-44fe-9d02-83de467da93c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.082988] env[61978]: DEBUG nova.compute.provider_tree [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1338.262374] env[61978]: DEBUG oslo_concurrency.lockutils [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.586666] env[61978]: DEBUG nova.scheduler.client.report [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory 
data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1338.909800] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "interface-7823099f-efdf-46bf-85d7-69e105dfb02c-ec006995-1071-4f27-8726-d161611d8e5b" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.910222] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "interface-7823099f-efdf-46bf-85d7-69e105dfb02c-ec006995-1071-4f27-8726-d161611d8e5b" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.092056] env[61978]: DEBUG oslo_concurrency.lockutils [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.226s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.092475] env[61978]: DEBUG nova.compute.manager [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1339.095352] env[61978]: DEBUG oslo_concurrency.lockutils [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.833s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.095352] env[61978]: DEBUG oslo_concurrency.lockutils [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.115460] env[61978]: INFO nova.scheduler.client.report [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Deleted allocations for instance 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1 [ 1339.413493] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1339.413733] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1339.414657] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c952cc6-239c-4089-a73f-a33bc8a0aea1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.434520] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7789d8c2-3f81-4292-9e65-fcd47b16fb12 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.461018] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Reconfiguring VM to detach interface {{(pid=61978) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1339.461018] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34f05497-ce45-409d-95e4-df3c2f710130 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.478517] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Acquiring lock "c27f7dd1-bdb0-450b-a58e-fe9afafdb368" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.478939] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Lock "c27f7dd1-bdb0-450b-a58e-fe9afafdb368" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.483299] env[61978]: DEBUG oslo_vmware.api [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1339.483299] env[61978]: value = "task-1395971" [ 1339.483299] env[61978]: _type = "Task" [ 1339.483299] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.492337] env[61978]: DEBUG oslo_vmware.api [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395971, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.600151] env[61978]: DEBUG nova.compute.utils [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1339.601964] env[61978]: DEBUG nova.compute.manager [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1339.602243] env[61978]: DEBUG nova.network.neutron [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1339.623926] env[61978]: DEBUG oslo_concurrency.lockutils [None req-26f2689b-9221-49d1-8615-01799f6e26ef tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "48d05337-7018-4dc2-a6a4-dd80ad3c4eb1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.246s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.641611] env[61978]: DEBUG nova.policy [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3f20b272502341bd80be470f98554d1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d95ebcafdca43b8a1636e21c7258803', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1339.880301] env[61978]: DEBUG nova.network.neutron [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Successfully created port: 20f9b0e2-05be-4020-bc8a-5e6547b1b840 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1339.983845] env[61978]: DEBUG nova.compute.manager [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1339.995313] env[61978]: DEBUG oslo_vmware.api [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395971, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.105818] env[61978]: DEBUG nova.compute.manager [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1340.500863] env[61978]: DEBUG oslo_vmware.api [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395971, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.511209] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1340.511486] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1340.512962] env[61978]: INFO nova.compute.claims [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1340.999686] env[61978]: DEBUG oslo_vmware.api [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395971, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.115475] env[61978]: DEBUG nova.compute.manager [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1341.145847] env[61978]: DEBUG nova.virt.hardware [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1341.146274] env[61978]: DEBUG nova.virt.hardware [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1341.146454] env[61978]: DEBUG nova.virt.hardware [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1341.146709] env[61978]: DEBUG nova.virt.hardware [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1341.146902] env[61978]: DEBUG nova.virt.hardware [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1341.147207] env[61978]: DEBUG nova.virt.hardware [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1341.147546] env[61978]: DEBUG nova.virt.hardware [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1341.147726] env[61978]: DEBUG nova.virt.hardware [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1341.147933] env[61978]: DEBUG 
nova.virt.hardware [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1341.148167] env[61978]: DEBUG nova.virt.hardware [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1341.148408] env[61978]: DEBUG nova.virt.hardware [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1341.149357] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07913d3b-f55d-47f1-b4c9-eb08f88f18ab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.158423] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74550e02-4883-43dd-91e5-382c26a17e23 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.277470] env[61978]: DEBUG nova.compute.manager [req-29058615-bb09-4cfe-b6b2-6377b637f89b req-20a46868-77fa-4b49-a93b-258dfcfc80a8 service nova] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Received event network-vif-plugged-20f9b0e2-05be-4020-bc8a-5e6547b1b840 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1341.277809] env[61978]: DEBUG oslo_concurrency.lockutils [req-29058615-bb09-4cfe-b6b2-6377b637f89b req-20a46868-77fa-4b49-a93b-258dfcfc80a8 service nova] Acquiring lock "001d90e9-9c22-4044-b550-d3acd778222e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1341.278092] env[61978]: DEBUG oslo_concurrency.lockutils [req-29058615-bb09-4cfe-b6b2-6377b637f89b req-20a46868-77fa-4b49-a93b-258dfcfc80a8 service nova] Lock "001d90e9-9c22-4044-b550-d3acd778222e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.278333] env[61978]: DEBUG oslo_concurrency.lockutils [req-29058615-bb09-4cfe-b6b2-6377b637f89b req-20a46868-77fa-4b49-a93b-258dfcfc80a8 service nova] Lock "001d90e9-9c22-4044-b550-d3acd778222e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.278531] env[61978]: DEBUG nova.compute.manager [req-29058615-bb09-4cfe-b6b2-6377b637f89b req-20a46868-77fa-4b49-a93b-258dfcfc80a8 service nova] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] No waiting events found dispatching network-vif-plugged-20f9b0e2-05be-4020-bc8a-5e6547b1b840 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1341.278711] env[61978]: WARNING nova.compute.manager 
[req-29058615-bb09-4cfe-b6b2-6377b637f89b req-20a46868-77fa-4b49-a93b-258dfcfc80a8 service nova] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Received unexpected event network-vif-plugged-20f9b0e2-05be-4020-bc8a-5e6547b1b840 for instance with vm_state building and task_state spawning. [ 1341.376901] env[61978]: DEBUG nova.network.neutron [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Successfully updated port: 20f9b0e2-05be-4020-bc8a-5e6547b1b840 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1341.502165] env[61978]: DEBUG oslo_vmware.api [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395971, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.636646] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-169ea695-c7cc-47ea-9a84-ae24d9a1e76a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.643870] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e93ebeb7-4ca4-46b1-855d-fbff750d3026 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.674440] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-337edbd8-0dd8-4523-9217-0ef6d6926e83 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.681628] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d7303e7-df8b-4871-9c98-4dfc2371b2b2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.694674] env[61978]: DEBUG nova.compute.provider_tree [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1341.879250] env[61978]: DEBUG oslo_concurrency.lockutils [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "refresh_cache-001d90e9-9c22-4044-b550-d3acd778222e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1341.879529] env[61978]: DEBUG oslo_concurrency.lockutils [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "refresh_cache-001d90e9-9c22-4044-b550-d3acd778222e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1341.879577] env[61978]: DEBUG nova.network.neutron [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Building network info cache 
for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1342.000636] env[61978]: DEBUG oslo_vmware.api [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395971, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.198014] env[61978]: DEBUG nova.scheduler.client.report [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1342.412837] env[61978]: DEBUG nova.network.neutron [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1342.501547] env[61978]: DEBUG oslo_vmware.api [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395971, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.542904] env[61978]: DEBUG nova.network.neutron [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Updating instance_info_cache with network_info: [{"id": "20f9b0e2-05be-4020-bc8a-5e6547b1b840", "address": "fa:16:3e:76:11:e7", "network": {"id": "69a878ea-ec7d-4793-9c48-f0f5d454a6fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1711420523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d95ebcafdca43b8a1636e21c7258803", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20f9b0e2-05", "ovs_interfaceid": "20f9b0e2-05be-4020-bc8a-5e6547b1b840", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1342.702950] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.191s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1342.703453] env[61978]: DEBUG nova.compute.manager [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1343.002162] env[61978]: DEBUG oslo_vmware.api [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395971, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.045819] env[61978]: DEBUG oslo_concurrency.lockutils [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "refresh_cache-001d90e9-9c22-4044-b550-d3acd778222e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1343.046224] env[61978]: DEBUG nova.compute.manager [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Instance network_info: |[{"id": "20f9b0e2-05be-4020-bc8a-5e6547b1b840", "address": "fa:16:3e:76:11:e7", "network": {"id": "69a878ea-ec7d-4793-9c48-f0f5d454a6fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1711420523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d95ebcafdca43b8a1636e21c7258803", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20f9b0e2-05", "ovs_interfaceid": "20f9b0e2-05be-4020-bc8a-5e6547b1b840", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1343.046701] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:11:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6def6dc5-d564-45ca-8f4f-7c820677e6e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '20f9b0e2-05be-4020-bc8a-5e6547b1b840', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1343.054216] env[61978]: DEBUG oslo.service.loopingcall [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1343.054467] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1343.054752] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc391cb6-1abc-4f5b-a8a3-be9b3feabad6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.074453] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1343.074453] env[61978]: value = "task-1395972" [ 1343.074453] env[61978]: _type = "Task" [ 1343.074453] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.081632] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395972, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.208518] env[61978]: DEBUG nova.compute.utils [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1343.210069] env[61978]: DEBUG nova.compute.manager [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Not allocating networking since 'none' was specified. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1343.306100] env[61978]: DEBUG nova.compute.manager [req-a2421208-082d-40c2-8b40-b4194249f6a3 req-fd9d14af-7ddf-46e1-bbb1-cfe833dccdc7 service nova] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Received event network-changed-20f9b0e2-05be-4020-bc8a-5e6547b1b840 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1343.306387] env[61978]: DEBUG nova.compute.manager [req-a2421208-082d-40c2-8b40-b4194249f6a3 req-fd9d14af-7ddf-46e1-bbb1-cfe833dccdc7 service nova] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Refreshing instance network info cache due to event network-changed-20f9b0e2-05be-4020-bc8a-5e6547b1b840. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1343.306680] env[61978]: DEBUG oslo_concurrency.lockutils [req-a2421208-082d-40c2-8b40-b4194249f6a3 req-fd9d14af-7ddf-46e1-bbb1-cfe833dccdc7 service nova] Acquiring lock "refresh_cache-001d90e9-9c22-4044-b550-d3acd778222e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1343.306974] env[61978]: DEBUG oslo_concurrency.lockutils [req-a2421208-082d-40c2-8b40-b4194249f6a3 req-fd9d14af-7ddf-46e1-bbb1-cfe833dccdc7 service nova] Acquired lock "refresh_cache-001d90e9-9c22-4044-b550-d3acd778222e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.307322] env[61978]: DEBUG nova.network.neutron [req-a2421208-082d-40c2-8b40-b4194249f6a3 req-fd9d14af-7ddf-46e1-bbb1-cfe833dccdc7 service nova] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Refreshing network info cache for port 20f9b0e2-05be-4020-bc8a-5e6547b1b840 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1343.503101] env[61978]: DEBUG oslo_vmware.api [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395971, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.583377] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395972, 'name': CreateVM_Task, 'duration_secs': 0.315289} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.583561] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1343.584243] env[61978]: DEBUG oslo_concurrency.lockutils [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1343.584426] env[61978]: DEBUG oslo_concurrency.lockutils [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.584756] env[61978]: DEBUG oslo_concurrency.lockutils [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1343.585029] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccfc7f6d-2802-40b5-8e10-721c926b2f8a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.589274] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f 
tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1343.589274] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5287dd0c-eab1-f037-1647-6cc1eb336476" [ 1343.589274] env[61978]: _type = "Task" [ 1343.589274] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.596489] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5287dd0c-eab1-f037-1647-6cc1eb336476, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.711400] env[61978]: DEBUG nova.compute.manager [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1343.994718] env[61978]: DEBUG nova.network.neutron [req-a2421208-082d-40c2-8b40-b4194249f6a3 req-fd9d14af-7ddf-46e1-bbb1-cfe833dccdc7 service nova] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Updated VIF entry in instance network info cache for port 20f9b0e2-05be-4020-bc8a-5e6547b1b840. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1343.995029] env[61978]: DEBUG nova.network.neutron [req-a2421208-082d-40c2-8b40-b4194249f6a3 req-fd9d14af-7ddf-46e1-bbb1-cfe833dccdc7 service nova] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Updating instance_info_cache with network_info: [{"id": "20f9b0e2-05be-4020-bc8a-5e6547b1b840", "address": "fa:16:3e:76:11:e7", "network": {"id": "69a878ea-ec7d-4793-9c48-f0f5d454a6fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1711420523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d95ebcafdca43b8a1636e21c7258803", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20f9b0e2-05", "ovs_interfaceid": "20f9b0e2-05be-4020-bc8a-5e6547b1b840", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1344.005899] env[61978]: DEBUG oslo_vmware.api [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395971, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.100102] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5287dd0c-eab1-f037-1647-6cc1eb336476, 'name': SearchDatastore_Task, 'duration_secs': 0.008966} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.100436] env[61978]: DEBUG oslo_concurrency.lockutils [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1344.100752] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1344.101015] env[61978]: DEBUG oslo_concurrency.lockutils [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.101182] env[61978]: DEBUG oslo_concurrency.lockutils [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.101375] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1344.101649] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0721d61e-fe30-4e78-803a-401de86aff46 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.109891] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1344.110083] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1344.110752] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf117b86-adff-44f1-b445-5e2c2e272f05 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.115741] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1344.115741] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52bf2c9e-011c-b973-1520-f3f49f9de040" [ 1344.115741] env[61978]: _type = "Task" [ 1344.115741] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.122993] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52bf2c9e-011c-b973-1520-f3f49f9de040, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.503027] env[61978]: DEBUG oslo_concurrency.lockutils [req-a2421208-082d-40c2-8b40-b4194249f6a3 req-fd9d14af-7ddf-46e1-bbb1-cfe833dccdc7 service nova] Releasing lock "refresh_cache-001d90e9-9c22-4044-b550-d3acd778222e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1344.510527] env[61978]: DEBUG oslo_vmware.api [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395971, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.626040] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52bf2c9e-011c-b973-1520-f3f49f9de040, 'name': SearchDatastore_Task, 'duration_secs': 0.008689} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.626974] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-469f35b0-4af7-4c0b-8f6f-499cadf44ccd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.631937] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1344.631937] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c7d2f7-2e34-d1e5-8096-7062dc4c72dc" [ 1344.631937] env[61978]: _type = "Task" [ 1344.631937] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.639412] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c7d2f7-2e34-d1e5-8096-7062dc4c72dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.720038] env[61978]: DEBUG nova.compute.manager [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1344.740743] env[61978]: DEBUG nova.virt.hardware [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1344.740995] env[61978]: DEBUG nova.virt.hardware [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1344.741184] env[61978]: DEBUG nova.virt.hardware [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1344.741381] env[61978]: DEBUG nova.virt.hardware [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1344.741535] env[61978]: DEBUG nova.virt.hardware [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1344.741740] env[61978]: DEBUG nova.virt.hardware [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1344.741982] 
env[61978]: DEBUG nova.virt.hardware [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1344.742169] env[61978]: DEBUG nova.virt.hardware [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1344.742347] env[61978]: DEBUG nova.virt.hardware [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1344.742519] env[61978]: DEBUG nova.virt.hardware [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1344.742698] env[61978]: DEBUG nova.virt.hardware [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1344.743566] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a92f3f1-4f7a-4d30-a03c-bd0d0a075882 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.751337] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a51997-006a-4f1c-b027-8421975e25e8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.765908] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Instance VIF info [] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1344.771246] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Creating folder: Project (83ec36e4901b4eeabde5deac6924d2a6). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1344.771501] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-648ccb4d-6019-40c3-817a-b1bc68a5d385 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.781252] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Created folder: Project (83ec36e4901b4eeabde5deac6924d2a6) in parent group-v295764. 
[ 1344.781431] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Creating folder: Instances. Parent ref: group-v296046. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1344.781633] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7ba83dd8-0a4f-4b0e-a27a-f8e46e376cc8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.791486] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Created folder: Instances in parent group-v296046. [ 1344.791714] env[61978]: DEBUG oslo.service.loopingcall [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1344.791896] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1344.792097] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-422f94f3-815c-434f-b0c8-b039bba7a70c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.808201] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1344.808201] env[61978]: value = "task-1395975" [ 1344.808201] env[61978]: _type = "Task" [ 1344.808201] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.815025] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395975, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.007734] env[61978]: DEBUG oslo_vmware.api [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395971, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.142736] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c7d2f7-2e34-d1e5-8096-7062dc4c72dc, 'name': SearchDatastore_Task, 'duration_secs': 0.010471} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.143010] env[61978]: DEBUG oslo_concurrency.lockutils [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.143281] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 001d90e9-9c22-4044-b550-d3acd778222e/001d90e9-9c22-4044-b550-d3acd778222e.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1345.143547] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-68bbf23e-bf67-430e-8eab-749a3379ac22 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.149831] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1345.149831] env[61978]: value = "task-1395976" [ 1345.149831] env[61978]: _type = "Task" [ 1345.149831] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.157420] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395976, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.319047] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1395975, 'name': CreateVM_Task, 'duration_secs': 0.231758} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.319181] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1345.319609] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.319762] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.320156] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1345.320406] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-467c4ea2-c0cb-45be-8081-09bd9cf57527 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.327250] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for the task: (returnval){ [ 1345.327250] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5262e4cb-5939-12f8-7d69-783693f1821f" [ 1345.327250] env[61978]: _type = "Task" [ 1345.327250] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.335216] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5262e4cb-5939-12f8-7d69-783693f1821f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.508897] env[61978]: DEBUG oslo_vmware.api [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1395971, 'name': ReconfigVM_Task, 'duration_secs': 5.748823} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.509221] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.509452] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Reconfigured VM to detach interface {{(pid=61978) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1345.516649] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1345.516895] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.517124] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1345.517349] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.517528] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.520921] env[61978]: INFO nova.compute.manager [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Terminating instance [ 1345.523639] env[61978]: DEBUG nova.compute.manager [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 
tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1345.523939] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1345.524265] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f7a683e-2830-47bf-92f5-8cd3a55ca569 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.532154] env[61978]: DEBUG oslo_vmware.api [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1345.532154] env[61978]: value = "task-1395977" [ 1345.532154] env[61978]: _type = "Task" [ 1345.532154] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.542820] env[61978]: DEBUG oslo_vmware.api [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395977, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.662101] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395976, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.439679} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.662476] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 001d90e9-9c22-4044-b550-d3acd778222e/001d90e9-9c22-4044-b550-d3acd778222e.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1345.662748] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1345.663050] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-101f1744-5ad6-4017-92a8-f8fb1401f849 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.669225] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1345.669225] env[61978]: value = "task-1395978" [ 1345.669225] env[61978]: _type = "Task" [ 1345.669225] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.677512] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395978, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.836693] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5262e4cb-5939-12f8-7d69-783693f1821f, 'name': SearchDatastore_Task, 'duration_secs': 0.058743} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.837009] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.837292] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1345.837525] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.837678] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.837862] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1345.838139] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e920a66-c69c-4497-89d0-7bd69936f0e4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.848181] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1345.848384] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1345.849118] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d28b468b-f3f9-4b55-a30e-22f039ad0db9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.853962] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for the task: (returnval){ [ 1345.853962] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c8d6eb-da87-cb63-7bed-8b982ff48429" [ 1345.853962] env[61978]: _type = "Task" [ 1345.853962] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.862753] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c8d6eb-da87-cb63-7bed-8b982ff48429, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.042028] env[61978]: DEBUG oslo_vmware.api [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395977, 'name': PowerOffVM_Task, 'duration_secs': 0.176442} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.042390] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1346.042515] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Volume detach. 
Driver type: vmdk {{(pid=61978) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1346.042717] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296021', 'volume_id': '73762ddd-195c-421a-95df-d5230c3e7c5e', 'name': 'volume-73762ddd-195c-421a-95df-d5230c3e7c5e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9', 'attached_at': '2024-11-04T15:11:10.000000', 'detached_at': '', 'volume_id': '73762ddd-195c-421a-95df-d5230c3e7c5e', 'serial': '73762ddd-195c-421a-95df-d5230c3e7c5e'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1346.043476] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3853c2-b5d7-4c10-9bee-45c4a7d82359 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.060973] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c8a9312-abe9-4b36-bb86-9f6985dd30e8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.067121] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e3096a5-5c7b-4a71-a73d-aebe5ce64bbf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.083935] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45faa863-9703-4a47-a57d-f6a98559a874 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.098076] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] The volume has not been displaced from its original location: [datastore2] volume-73762ddd-195c-421a-95df-d5230c3e7c5e/volume-73762ddd-195c-421a-95df-d5230c3e7c5e.vmdk. No consolidation needed. 
{{(pid=61978) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1346.103658] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Reconfiguring VM instance instance-00000062 to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1346.103969] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb3dde05-7084-4825-9d6a-903735c03732 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.122709] env[61978]: DEBUG oslo_vmware.api [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1346.122709] env[61978]: value = "task-1395979" [ 1346.122709] env[61978]: _type = "Task" [ 1346.122709] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.131284] env[61978]: DEBUG oslo_vmware.api [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395979, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.177787] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395978, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063678} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.178070] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1346.178852] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa61bb7b-31bf-4132-bc04-75a76f39cc82 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.201577] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] 001d90e9-9c22-4044-b550-d3acd778222e/001d90e9-9c22-4044-b550-d3acd778222e.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1346.201879] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41d98c13-14b3-4ca5-a303-220caad1843a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.220556] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1346.220556] env[61978]: value = "task-1395980" [ 1346.220556] env[61978]: _type = "Task" [ 1346.220556] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.231401] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395980, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.366244] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c8d6eb-da87-cb63-7bed-8b982ff48429, 'name': SearchDatastore_Task, 'duration_secs': 0.008411} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.367401] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82c9eadb-8854-49a4-98c4-e76e170e8632 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.373012] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for the task: (returnval){ [ 1346.373012] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cf59ce-dc46-7102-53ec-63db6e542c5e" [ 1346.373012] env[61978]: _type = "Task" [ 1346.373012] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.382697] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cf59ce-dc46-7102-53ec-63db6e542c5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.633153] env[61978]: DEBUG oslo_vmware.api [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395979, 'name': ReconfigVM_Task, 'duration_secs': 0.153707} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.633455] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Reconfigured VM instance instance-00000062 to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1346.638324] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1e041aa-f015-4d09-b0a9-066502cb6484 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.653090] env[61978]: DEBUG oslo_vmware.api [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1346.653090] env[61978]: value = "task-1395981" [ 1346.653090] env[61978]: _type = "Task" [ 1346.653090] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.660560] env[61978]: DEBUG oslo_vmware.api [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395981, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.730204] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395980, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.868137] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1346.868548] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.868879] env[61978]: DEBUG nova.network.neutron [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1346.883305] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52cf59ce-dc46-7102-53ec-63db6e542c5e, 'name': SearchDatastore_Task, 'duration_secs': 0.011431} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.884163] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1346.884431] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] c27f7dd1-bdb0-450b-a58e-fe9afafdb368/c27f7dd1-bdb0-450b-a58e-fe9afafdb368.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1346.884698] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f08feb88-9393-4c92-bc97-ec8a87b87f42 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.891994] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for the task: (returnval){ [ 1346.891994] env[61978]: value = "task-1395982" [ 1346.891994] env[61978]: _type = "Task" [ 1346.891994] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.899960] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1395982, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.163647] env[61978]: DEBUG oslo_vmware.api [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395981, 'name': ReconfigVM_Task, 'duration_secs': 0.280885} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.166022] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296021', 'volume_id': '73762ddd-195c-421a-95df-d5230c3e7c5e', 'name': 'volume-73762ddd-195c-421a-95df-d5230c3e7c5e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9', 'attached_at': '2024-11-04T15:11:10.000000', 'detached_at': '', 'volume_id': '73762ddd-195c-421a-95df-d5230c3e7c5e', 'serial': '73762ddd-195c-421a-95df-d5230c3e7c5e'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1347.166022] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1347.166022] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76ee7b3-b812-458f-9f79-cc70f2dc86e3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.174021] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1347.174021] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85532012-0e9c-4f03-836a-8cb308cc7ce2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.231095] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395980, 'name': ReconfigVM_Task, 'duration_secs': 0.560685} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.231650] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Reconfigured VM instance instance-0000006b to attach disk [datastore2] 001d90e9-9c22-4044-b550-d3acd778222e/001d90e9-9c22-4044-b550-d3acd778222e.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1347.234279] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4bd5fa2f-d2b3-4184-b9e0-900a8f81bd84 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.235682] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1347.236049] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1347.236383] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Deleting the datastore file [datastore2] c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1347.238025] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4f9b6e1b-ca6e-4e88-b436-36533e3d188e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.241195] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1347.241195] env[61978]: value = "task-1395984" [ 1347.241195] env[61978]: _type = "Task" [ 1347.241195] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.245437] env[61978]: DEBUG oslo_vmware.api [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1347.245437] env[61978]: value = "task-1395985" [ 1347.245437] env[61978]: _type = "Task" [ 1347.245437] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.256783] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395984, 'name': Rename_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.263061] env[61978]: DEBUG oslo_vmware.api [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395985, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.402344] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1395982, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.626372] env[61978]: INFO nova.network.neutron [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Port ec006995-1071-4f27-8726-d161611d8e5b from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1347.626718] env[61978]: DEBUG nova.network.neutron [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Updating instance_info_cache with network_info: [{"id": "377707c6-c569-41b4-b460-d4ffd83a8c03", "address": "fa:16:3e:38:98:1d", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap377707c6-c5", "ovs_interfaceid": "377707c6-c569-41b4-b460-d4ffd83a8c03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1347.755637] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395984, 'name': Rename_Task, 'duration_secs': 0.428199} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.758710] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1347.759450] env[61978]: DEBUG oslo_vmware.api [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395985, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.384267} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.759450] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fcd10039-7683-4925-8af2-d35bcd1889e9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.760860] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1347.761060] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1347.761668] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1347.761668] env[61978]: INFO nova.compute.manager [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Took 2.24 seconds to destroy the instance on the hypervisor. [ 1347.761668] env[61978]: DEBUG oslo.service.loopingcall [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1347.761826] env[61978]: DEBUG nova.compute.manager [-] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1347.761916] env[61978]: DEBUG nova.network.neutron [-] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1347.767209] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1347.767209] env[61978]: value = "task-1395986" [ 1347.767209] env[61978]: _type = "Task" [ 1347.767209] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.774562] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395986, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.905834] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1395982, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.721565} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.906264] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] c27f7dd1-bdb0-450b-a58e-fe9afafdb368/c27f7dd1-bdb0-450b-a58e-fe9afafdb368.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1347.906707] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1347.906928] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cd357d7f-c963-4013-a5d2-c479647463fb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.914212] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for the task: (returnval){ [ 1347.914212] env[61978]: value = "task-1395987" [ 1347.914212] env[61978]: _type = "Task" [ 1347.914212] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.923869] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1395987, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.964811] env[61978]: DEBUG nova.compute.manager [req-59230560-da86-42a1-ba9a-99e35c0536a0 req-54d7f601-cdff-4041-9bb7-26666458f7fc service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Received event network-changed-377707c6-c569-41b4-b460-d4ffd83a8c03 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1347.965152] env[61978]: DEBUG nova.compute.manager [req-59230560-da86-42a1-ba9a-99e35c0536a0 req-54d7f601-cdff-4041-9bb7-26666458f7fc service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Refreshing instance network info cache due to event network-changed-377707c6-c569-41b4-b460-d4ffd83a8c03. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1347.965556] env[61978]: DEBUG oslo_concurrency.lockutils [req-59230560-da86-42a1-ba9a-99e35c0536a0 req-54d7f601-cdff-4041-9bb7-26666458f7fc service nova] Acquiring lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1348.133021] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1348.139162] env[61978]: DEBUG oslo_concurrency.lockutils [req-59230560-da86-42a1-ba9a-99e35c0536a0 req-54d7f601-cdff-4041-9bb7-26666458f7fc service nova] Acquired lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1348.139162] env[61978]: DEBUG nova.network.neutron [req-59230560-da86-42a1-ba9a-99e35c0536a0 req-54d7f601-cdff-4041-9bb7-26666458f7fc service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Refreshing network info cache for port 377707c6-c569-41b4-b460-d4ffd83a8c03 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1348.276915] env[61978]: DEBUG oslo_vmware.api [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395986, 'name': PowerOnVM_Task, 'duration_secs': 0.492171} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.277287] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1348.277541] env[61978]: INFO nova.compute.manager [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Took 7.16 seconds to spawn the instance on the hypervisor. [ 1348.277775] env[61978]: DEBUG nova.compute.manager [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1348.278627] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871f0e44-338c-4003-851e-db590a8d64c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.290030] env[61978]: DEBUG nova.compute.manager [req-ba9994c6-df62-42f0-8ab7-44809f154b38 req-19eb7421-7725-4b4b-a006-fd6518c034ff service nova] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Received event network-vif-deleted-f394483a-0b84-4d01-aee1-a50c3a3ee0ff {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1348.290030] env[61978]: INFO nova.compute.manager [req-ba9994c6-df62-42f0-8ab7-44809f154b38 req-19eb7421-7725-4b4b-a006-fd6518c034ff service nova] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Neutron deleted interface f394483a-0b84-4d01-aee1-a50c3a3ee0ff; detaching it from the instance and deleting it from the info cache [ 1348.290030] env[61978]: DEBUG nova.network.neutron [req-ba9994c6-df62-42f0-8ab7-44809f154b38 req-19eb7421-7725-4b4b-a006-fd6518c034ff service nova] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.423487] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1395987, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073366} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.423734] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1348.424496] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23da0e2f-ac1a-4deb-8593-9f886662f7f7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.443913] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] c27f7dd1-bdb0-450b-a58e-fe9afafdb368/c27f7dd1-bdb0-450b-a58e-fe9afafdb368.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1348.444206] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-930864aa-ad25-45cc-8720-d156e7b0b323 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.463172] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for the task: (returnval){ [ 1348.463172] env[61978]: value = "task-1395988" [ 1348.463172] env[61978]: _type = "Task" [ 1348.463172] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.473172] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1395988, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.473616] env[61978]: DEBUG nova.compute.manager [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Stashing vm_state: active {{(pid=61978) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1348.553890] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.554183] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.554375] env[61978]: INFO nova.compute.manager [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Shelving [ 1348.637336] env[61978]: DEBUG oslo_concurrency.lockutils [None req-7e224e76-9357-4434-8683-eb2a6397bfe8 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "interface-7823099f-efdf-46bf-85d7-69e105dfb02c-ec006995-1071-4f27-8726-d161611d8e5b" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.727s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.674065] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "interface-b4541d84-b4c3-4441-b5a7-90de2dac3562-ec006995-1071-4f27-8726-d161611d8e5b" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.674209] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "interface-b4541d84-b4c3-4441-b5a7-90de2dac3562-ec006995-1071-4f27-8726-d161611d8e5b" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.674535] env[61978]: DEBUG nova.objects.instance [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lazy-loading 'flavor' on Instance uuid b4541d84-b4c3-4441-b5a7-90de2dac3562 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1348.752829] env[61978]: DEBUG nova.network.neutron [-] [instance: 
c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.793354] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2fea1df0-774f-4a94-a1af-e83803be010c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.798462] env[61978]: INFO nova.compute.manager [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Took 11.95 seconds to build instance. [ 1348.807078] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75242631-c6b4-4770-ac3a-e1deee9a6fae {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.842086] env[61978]: DEBUG nova.compute.manager [req-ba9994c6-df62-42f0-8ab7-44809f154b38 req-19eb7421-7725-4b4b-a006-fd6518c034ff service nova] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Detach interface failed, port_id=f394483a-0b84-4d01-aee1-a50c3a3ee0ff, reason: Instance c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1348.856072] env[61978]: DEBUG nova.network.neutron [req-59230560-da86-42a1-ba9a-99e35c0536a0 req-54d7f601-cdff-4041-9bb7-26666458f7fc service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Updated VIF entry in instance network info cache for port 377707c6-c569-41b4-b460-d4ffd83a8c03. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1348.856415] env[61978]: DEBUG nova.network.neutron [req-59230560-da86-42a1-ba9a-99e35c0536a0 req-54d7f601-cdff-4041-9bb7-26666458f7fc service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Updating instance_info_cache with network_info: [{"id": "377707c6-c569-41b4-b460-d4ffd83a8c03", "address": "fa:16:3e:38:98:1d", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap377707c6-c5", "ovs_interfaceid": "377707c6-c569-41b4-b460-d4ffd83a8c03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.972939] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: 
{'id': task-1395988, 'name': ReconfigVM_Task, 'duration_secs': 0.265761} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.973262] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Reconfigured VM instance instance-0000006c to attach disk [datastore2] c27f7dd1-bdb0-450b-a58e-fe9afafdb368/c27f7dd1-bdb0-450b-a58e-fe9afafdb368.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1348.973892] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-77862cdd-341b-4704-adf6-0786e2dfc9e1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.982410] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for the task: (returnval){ [ 1348.982410] env[61978]: value = "task-1395989" [ 1348.982410] env[61978]: _type = "Task" [ 1348.982410] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.992136] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1395989, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.995931] env[61978]: DEBUG oslo_concurrency.lockutils [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.996189] env[61978]: DEBUG oslo_concurrency.lockutils [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1349.067031] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1349.067031] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4389cdfc-3631-447e-9760-a53111d0be8d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.073978] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1349.073978] env[61978]: value = "task-1395990" [ 1349.073978] env[61978]: 
_type = "Task" [ 1349.073978] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.083074] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395990, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.256805] env[61978]: INFO nova.compute.manager [-] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Took 1.49 seconds to deallocate network for instance. [ 1349.301208] env[61978]: DEBUG oslo_concurrency.lockutils [None req-700c3a7b-9fa6-4b42-99c3-77f55ad7851f tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "001d90e9-9c22-4044-b550-d3acd778222e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.464s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1349.359724] env[61978]: DEBUG oslo_concurrency.lockutils [req-59230560-da86-42a1-ba9a-99e35c0536a0 req-54d7f601-cdff-4041-9bb7-26666458f7fc service nova] Releasing lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1349.492798] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1395989, 'name': Rename_Task, 'duration_secs': 0.12977} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.493712] env[61978]: DEBUG nova.objects.instance [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lazy-loading 'pci_requests' on Instance uuid b4541d84-b4c3-4441-b5a7-90de2dac3562 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1349.494792] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1349.495414] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57d29bc0-d151-4544-a497-9dbe55cfd47c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.502725] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for the task: (returnval){ [ 1349.502725] env[61978]: value = "task-1395991" [ 1349.502725] env[61978]: _type = "Task" [ 1349.502725] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.505495] env[61978]: INFO nova.compute.claims [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1349.517316] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1395991, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.583055] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395990, 'name': PowerOffVM_Task, 'duration_secs': 0.332888} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.583879] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1349.584698] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6859baf1-550e-4b17-a6c0-4409632fa898 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.603342] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ea816a2-0813-4e1c-9feb-4b4f81cc6bce {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.802968] env[61978]: INFO nova.compute.manager [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Took 0.55 seconds to detach 1 volumes for instance. 
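(Annotation, not part of the captured log.) The surrounding records repeatedly show the oslo.vmware invoke-then-poll pattern: a vCenter task method such as ReconfigVM_Task or CopyVirtualDisk_Task is invoked, then wait_for_task polls it until completion (the "Task: {...} progress is N%" lines). A minimal sketch of that pattern follows; the vCenter host, credentials, vm_ref and reconfig_spec are illustrative placeholders, not values taken from this run.

# Sketch of the invoke-then-poll pattern visible in the log above.
# Host, credentials, vm_ref and reconfig_spec are placeholders only.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vcenter.example.org',      # placeholder vCenter host
    'user@vsphere.local',       # placeholder username
    'secret',                   # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)

def reconfig_and_wait(vm_ref, reconfig_spec):
    # Corresponds to the "Invoking VirtualMachine.ReconfigVM_Task" records.
    task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                              vm_ref, spec=reconfig_spec)
    # wait_for_task polls the task (the "progress is N%" records) and
    # raises if vCenter reports the task ended in an error state.
    return session.wait_for_task(task)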
[ 1349.805478] env[61978]: DEBUG nova.compute.manager [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Deleting volume: 73762ddd-195c-421a-95df-d5230c3e7c5e {{(pid=61978) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3247}} [ 1349.996939] env[61978]: DEBUG nova.objects.base [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1349.997193] env[61978]: DEBUG nova.network.neutron [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1350.014904] env[61978]: INFO nova.compute.resource_tracker [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Updating resource usage from migration 5150d922-c55e-4cf5-aa83-26e59b3838ba [ 1350.022042] env[61978]: DEBUG oslo_vmware.api [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1395991, 'name': PowerOnVM_Task, 'duration_secs': 0.436664} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.022313] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1350.022714] env[61978]: INFO nova.compute.manager [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Took 5.30 seconds to spawn the instance on the hypervisor. 
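(Annotation, not part of the captured log.) The Acquiring/Acquired/Releasing lock "refresh_cache-<uuid>" records around the network-info cache refreshes come from oslo.concurrency's lockutils context manager (lockutils.py:310/313/331 in the records). Below is a minimal sketch of how such a guarded section is typically written; the lock name format and refresh_network_cache() are illustrative placeholders.

# Sketch of the oslo.concurrency locking seen in the
# 'Acquiring/Acquired/Releasing lock "refresh_cache-<uuid>"' records.
from oslo_concurrency import lockutils

def refresh_network_cache(instance_uuid):
    # Placeholder: would rebuild instance_info_cache from Neutron.
    pass

def refresh_with_lock(instance_uuid):
    # lockutils.lock() emits the acquire/release debug pairs shown above;
    # this form is process-local, while external=True would use a file lock.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        refresh_network_cache(instance_uuid)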
[ 1350.022714] env[61978]: DEBUG nova.compute.manager [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1350.023493] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7bf9318-732e-4454-9963-8e6b23fcacb3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.029234] env[61978]: DEBUG nova.compute.manager [req-46bb5c43-6543-4dc1-ac84-b0c17b1f1ef5 req-3cb3441d-921c-4b53-b53f-089daf8bf6e5 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Received event network-changed-7cefaef7-7dfd-4081-8872-bbdb8d201973 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1350.029422] env[61978]: DEBUG nova.compute.manager [req-46bb5c43-6543-4dc1-ac84-b0c17b1f1ef5 req-3cb3441d-921c-4b53-b53f-089daf8bf6e5 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Refreshing instance network info cache due to event network-changed-7cefaef7-7dfd-4081-8872-bbdb8d201973. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1350.029640] env[61978]: DEBUG oslo_concurrency.lockutils [req-46bb5c43-6543-4dc1-ac84-b0c17b1f1ef5 req-3cb3441d-921c-4b53-b53f-089daf8bf6e5 service nova] Acquiring lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1350.029780] env[61978]: DEBUG oslo_concurrency.lockutils [req-46bb5c43-6543-4dc1-ac84-b0c17b1f1ef5 req-3cb3441d-921c-4b53-b53f-089daf8bf6e5 service nova] Acquired lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.029941] env[61978]: DEBUG nova.network.neutron [req-46bb5c43-6543-4dc1-ac84-b0c17b1f1ef5 req-3cb3441d-921c-4b53-b53f-089daf8bf6e5 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Refreshing network info cache for port 7cefaef7-7dfd-4081-8872-bbdb8d201973 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1350.067156] env[61978]: DEBUG nova.policy [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '346f982562de48fab1702aca567113ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3a4f29a959447159b2f7d194ea94873', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1350.073291] env[61978]: DEBUG oslo_concurrency.lockutils [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "001d90e9-9c22-4044-b550-d3acd778222e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1350.073527] env[61978]: DEBUG oslo_concurrency.lockutils [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "001d90e9-9c22-4044-b550-d3acd778222e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1350.073733] env[61978]: DEBUG oslo_concurrency.lockutils [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "001d90e9-9c22-4044-b550-d3acd778222e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1350.073920] env[61978]: DEBUG oslo_concurrency.lockutils [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "001d90e9-9c22-4044-b550-d3acd778222e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1350.074104] env[61978]: DEBUG oslo_concurrency.lockutils [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "001d90e9-9c22-4044-b550-d3acd778222e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1350.078225] env[61978]: INFO nova.compute.manager [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Terminating instance [ 1350.080083] env[61978]: DEBUG nova.compute.manager [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1350.080288] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1350.081352] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9243e809-658e-4784-81b3-a8cd124cc287 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.090646] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1350.090646] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-83929550-fe41-43fc-87a1-e417dc3a70dc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.096638] env[61978]: DEBUG oslo_vmware.api [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1350.096638] env[61978]: value = "task-1395993" [ 1350.096638] env[61978]: _type = "Task" [ 1350.096638] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.103887] env[61978]: DEBUG oslo_vmware.api [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395993, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.114057] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Creating Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1350.114441] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0b4455d9-db8f-4d8f-8aa7-fda6f6e721a0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.124601] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1350.124601] env[61978]: value = "task-1395994" [ 1350.124601] env[61978]: _type = "Task" [ 1350.124601] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.134846] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395994, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.186630] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-553771fd-689e-4249-9d8e-a10941d54dde {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.194011] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aecd671e-4430-457a-b85a-80f8657f3277 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.224017] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850fa140-00e0-4cc0-a147-29662e01f7e1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.232013] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50acce82-d59a-4e34-9c58-aebd6e70bb10 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.247330] env[61978]: DEBUG nova.compute.provider_tree [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1350.346063] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1350.546492] env[61978]: INFO nova.compute.manager [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Took 10.06 seconds to build instance. [ 1350.608806] env[61978]: DEBUG oslo_vmware.api [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395993, 'name': PowerOffVM_Task, 'duration_secs': 0.185438} completed successfully. 
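
Each vCenter operation in this stretch of the log (PowerOffVM_Task, CreateSnapshot_Task, later CloneVM_Task and DeleteDatastoreFile_Task) follows the same submit-then-poll shape: the driver waits on a task handle, logging progress until the task reports success together with its duration. A rough, stdlib-only sketch of such a poll loop; FakeTask and its fields are stand-ins for illustration, while the real code path goes through oslo.vmware's task polling:

    import time

    class FakeTask:
        """Stand-in for a vCenter task handle; real code reads TaskInfo via the API."""
        def __init__(self, task_id, name, duration):
            self.id, self.name = task_id, name
            self._deadline = time.monotonic() + duration

        @property
        def info(self):
            done = time.monotonic() >= self._deadline
            return {"state": "success" if done else "running",
                    "progress": 100 if done else 0}

    def wait_for_task(task, poll_interval=0.5):
        """Poll until the task reaches a terminal state, logging progress."""
        started = time.monotonic()
        while True:
            info = task.info
            if info["state"] == "success":
                print(f"Task: {{'id': {task.id}, 'name': {task.name}, "
                      f"'duration_secs': {time.monotonic() - started:.6f}}} "
                      "completed successfully.")
                return
            if info["state"] == "error":
                raise RuntimeError(f"Task {task.id} failed")
            print(f"Task: {{'id': {task.id}, 'name': {task.name}}} "
                  f"progress is {info['progress']}%.")
            time.sleep(poll_interval)

    wait_for_task(FakeTask("task-1395993", "PowerOffVM_Task", duration=0.2),
                  poll_interval=0.1)
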
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.609099] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1350.609285] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1350.609547] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-67c7b870-64f2-4357-8463-b0babc679ec5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.634180] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395994, 'name': CreateSnapshot_Task, 'duration_secs': 0.488319} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.634588] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Created Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1350.635691] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5028c498-97cd-4fe9-91c6-ca8ffdadb3c1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.666913] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1350.667118] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1350.667365] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Deleting the datastore file [datastore2] 001d90e9-9c22-4044-b550-d3acd778222e {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1350.667637] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c948f21-4891-47dc-8495-3882154235b7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.677012] 
env[61978]: DEBUG oslo_vmware.api [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for the task: (returnval){ [ 1350.677012] env[61978]: value = "task-1395996" [ 1350.677012] env[61978]: _type = "Task" [ 1350.677012] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.684670] env[61978]: DEBUG oslo_vmware.api [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395996, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.751854] env[61978]: DEBUG nova.scheduler.client.report [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1350.765521] env[61978]: DEBUG nova.network.neutron [req-46bb5c43-6543-4dc1-ac84-b0c17b1f1ef5 req-3cb3441d-921c-4b53-b53f-089daf8bf6e5 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Updated VIF entry in instance network info cache for port 7cefaef7-7dfd-4081-8872-bbdb8d201973. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1350.765842] env[61978]: DEBUG nova.network.neutron [req-46bb5c43-6543-4dc1-ac84-b0c17b1f1ef5 req-3cb3441d-921c-4b53-b53f-089daf8bf6e5 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Updating instance_info_cache with network_info: [{"id": "7cefaef7-7dfd-4081-8872-bbdb8d201973", "address": "fa:16:3e:40:2b:80", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cefaef7-7d", "ovs_interfaceid": "7cefaef7-7dfd-4081-8872-bbdb8d201973", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1351.053234] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ea1e9d70-4947-4aa7-8812-eb7a4f7b2199 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Lock "c27f7dd1-bdb0-450b-a58e-fe9afafdb368" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.571s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.157018] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Creating linked-clone VM from snapshot {{(pid=61978) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1351.157414] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-03c2a470-30b7-42eb-85bd-f5073e093205 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.165931] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1351.165931] env[61978]: value = "task-1395997" [ 1351.165931] env[61978]: _type = "Task" [ 1351.165931] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.173331] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395997, 'name': CloneVM_Task} progress is 0%. 
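
The instance_info_cache payloads above are lists of VIF dictionaries: port id, MAC address, the network with its subnets, fixed and floating IPs, and the OVS/NSX binding details. A small illustrative helper (not Nova code) showing how the addresses can be read out of that structure:

    def summarize_vifs(network_info):
        """Return (port_id, mac, fixed_ips, floating_ips) tuples from a
        network_info list shaped like the cache entries logged above."""
        rows = []
        for vif in network_info:
            fixed, floating = [], []
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    fixed.append(ip["address"])
                    floating.extend(f["address"] for f in ip.get("floating_ips", []))
            rows.append((vif["id"], vif["address"], fixed, floating))
        return rows

    example = [{
        "id": "7cefaef7-7dfd-4081-8872-bbdb8d201973",
        "address": "fa:16:3e:40:2b:80",
        "network": {"subnets": [{"ips": [
            {"address": "192.168.128.7",
             "floating_ips": [{"address": "10.180.180.225"}]}]}]},
    }]
    print(summarize_vifs(example))
    # -> [('7cefaef7-7dfd-4081-8872-bbdb8d201973', 'fa:16:3e:40:2b:80',
    #      ['192.168.128.7'], ['10.180.180.225'])]
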
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.185272] env[61978]: DEBUG oslo_vmware.api [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Task: {'id': task-1395996, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172898} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.185511] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1351.185700] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1351.185881] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1351.186074] env[61978]: INFO nova.compute.manager [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1351.186321] env[61978]: DEBUG oslo.service.loopingcall [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
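
Network deallocation for the destroyed instance is driven through a looping call that keeps retrying the cleanup function rather than failing teardown on the first error. A condensed sketch of that retry-until-done pattern; the attempt count and delay below are illustrative assumptions, not Nova's actual retry policy:

    import time

    def call_with_retries(func, max_attempts=3, delay=1.0):
        """Call func until it succeeds or attempts run out, sleeping between tries."""
        for attempt in range(1, max_attempts + 1):
            try:
                return func()
            except Exception as exc:  # real code narrows this to retriable errors
                if attempt == max_attempts:
                    raise
                print(f"attempt {attempt} failed ({exc}); retrying in {delay}s")
                time.sleep(delay)

    def deallocate_network():
        # Placeholder for the Neutron port cleanup done during instance teardown.
        print("deallocating network for instance 001d90e9-9c22-4044-b550-d3acd778222e")

    call_with_retries(deallocate_network, max_attempts=3, delay=0.1)
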
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1351.186550] env[61978]: DEBUG nova.compute.manager [-] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1351.186646] env[61978]: DEBUG nova.network.neutron [-] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1351.258172] env[61978]: DEBUG oslo_concurrency.lockutils [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.262s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.258420] env[61978]: INFO nova.compute.manager [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Migrating [ 1351.265103] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.919s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.265344] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.270489] env[61978]: DEBUG oslo_concurrency.lockutils [req-46bb5c43-6543-4dc1-ac84-b0c17b1f1ef5 req-3cb3441d-921c-4b53-b53f-089daf8bf6e5 service nova] Releasing lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1351.292425] env[61978]: INFO nova.scheduler.client.report [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Deleted allocations for instance c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9 [ 1351.554915] env[61978]: INFO nova.compute.manager [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Rebuilding instance [ 1351.574322] env[61978]: DEBUG nova.compute.manager [req-c5f4e42f-59b9-4680-8ad7-9c556385249a req-bb5c89a8-4433-4b85-9339-7c6f222f058b service nova] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Received event network-vif-deleted-20f9b0e2-05be-4020-bc8a-5e6547b1b840 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1351.574663] env[61978]: INFO nova.compute.manager [req-c5f4e42f-59b9-4680-8ad7-9c556385249a req-bb5c89a8-4433-4b85-9339-7c6f222f058b service nova] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Neutron deleted interface 
20f9b0e2-05be-4020-bc8a-5e6547b1b840; detaching it from the instance and deleting it from the info cache [ 1351.575012] env[61978]: DEBUG nova.network.neutron [req-c5f4e42f-59b9-4680-8ad7-9c556385249a req-bb5c89a8-4433-4b85-9339-7c6f222f058b service nova] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1351.610224] env[61978]: DEBUG nova.compute.manager [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1351.611095] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f44e76-d444-4822-8952-2ba7af24968f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.657465] env[61978]: DEBUG nova.network.neutron [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Successfully updated port: ec006995-1071-4f27-8726-d161611d8e5b {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1351.676789] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395997, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.778388] env[61978]: DEBUG oslo_concurrency.lockutils [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "refresh_cache-ac1676dd-affa-49cd-9e7b-a301abcec232" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1351.778598] env[61978]: DEBUG oslo_concurrency.lockutils [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "refresh_cache-ac1676dd-affa-49cd-9e7b-a301abcec232" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.778899] env[61978]: DEBUG nova.network.neutron [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1351.804393] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9531c872-be6c-4910-a941-a7dbaddeaf49 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.287s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1352.024594] env[61978]: DEBUG nova.network.neutron [-] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Updating instance_info_cache with network_info: [] 
{{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1352.079761] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-df8219f3-3b1a-4830-b089-2eadc3f5a4d2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.089045] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa52584-7ac1-461e-90c3-727dc13f0a60 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.119454] env[61978]: DEBUG nova.compute.manager [req-c5f4e42f-59b9-4680-8ad7-9c556385249a req-bb5c89a8-4433-4b85-9339-7c6f222f058b service nova] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Detach interface failed, port_id=20f9b0e2-05be-4020-bc8a-5e6547b1b840, reason: Instance 001d90e9-9c22-4044-b550-d3acd778222e could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1352.122291] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1352.122676] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb51fb87-32e0-4e6a-9ec9-482ba9200193 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.129719] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for the task: (returnval){ [ 1352.129719] env[61978]: value = "task-1395998" [ 1352.129719] env[61978]: _type = "Task" [ 1352.129719] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.137922] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1395998, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.161122] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1352.161348] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1352.161569] env[61978]: DEBUG nova.network.neutron [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1352.182076] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395997, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.497422] env[61978]: DEBUG oslo_concurrency.lockutils [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "f33d00ec-72b7-43f2-bc0d-320e3219ae47" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1352.497713] env[61978]: DEBUG oslo_concurrency.lockutils [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "f33d00ec-72b7-43f2-bc0d-320e3219ae47" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1352.497941] env[61978]: DEBUG oslo_concurrency.lockutils [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "f33d00ec-72b7-43f2-bc0d-320e3219ae47-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1352.498247] env[61978]: DEBUG oslo_concurrency.lockutils [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "f33d00ec-72b7-43f2-bc0d-320e3219ae47-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1352.498485] env[61978]: DEBUG oslo_concurrency.lockutils [None req-99ef6ced-625d-4220-be6b-bf103263f8ce 
tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "f33d00ec-72b7-43f2-bc0d-320e3219ae47-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1352.500729] env[61978]: INFO nova.compute.manager [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Terminating instance [ 1352.502754] env[61978]: DEBUG nova.compute.manager [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1352.502948] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1352.503795] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f849795f-296c-4b08-bfeb-be675f1cfe70 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.511471] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1352.511710] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cff99dca-736f-4f09-80a8-190b545c6c94 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.514705] env[61978]: DEBUG nova.network.neutron [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Updating instance_info_cache with network_info: [{"id": "f8cf63ba-ee62-4a3a-85e0-87d88ff84665", "address": "fa:16:3e:dd:50:05", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tapf8cf63ba-ee", "ovs_interfaceid": "f8cf63ba-ee62-4a3a-85e0-87d88ff84665", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1352.518615] env[61978]: DEBUG oslo_vmware.api [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1352.518615] env[61978]: value = "task-1395999" [ 1352.518615] env[61978]: _type = "Task" [ 1352.518615] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.528179] env[61978]: INFO nova.compute.manager [-] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Took 1.34 seconds to deallocate network for instance. [ 1352.528565] env[61978]: DEBUG oslo_vmware.api [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395999, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.640232] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1395998, 'name': PowerOffVM_Task, 'duration_secs': 0.169195} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.640580] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1352.640580] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1352.641386] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df31c796-f8bc-4043-bff9-f81948bbb22d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.648910] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1352.649239] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9339ddd0-453b-4ce0-a087-81fc776866b6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.675365] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: 
c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1352.675485] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1352.675605] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Deleting the datastore file [datastore2] c27f7dd1-bdb0-450b-a58e-fe9afafdb368 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1352.676385] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ebad34e3-020d-4655-8046-aa5315ec4475 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.681139] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1395997, 'name': CloneVM_Task, 'duration_secs': 1.365454} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.681827] env[61978]: INFO nova.virt.vmwareapi.vmops [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Created linked-clone VM from snapshot [ 1352.682560] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a32efd2-d65f-4135-a7e8-1e82c9ca742c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.688630] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for the task: (returnval){ [ 1352.688630] env[61978]: value = "task-1396001" [ 1352.688630] env[61978]: _type = "Task" [ 1352.688630] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.692556] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Uploading image c3d6fb3f-8d33-4f48-a74c-692608eb4ac9 {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1352.698733] env[61978]: WARNING nova.network.neutron [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] 3b4d30cc-d9a1-4180-b6eb-881241a1c0f4 already exists in list: networks containing: ['3b4d30cc-d9a1-4180-b6eb-881241a1c0f4']. 
ignoring it [ 1352.703875] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1396001, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.719800] env[61978]: DEBUG oslo_vmware.rw_handles [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1352.719800] env[61978]: value = "vm-296050" [ 1352.719800] env[61978]: _type = "VirtualMachine" [ 1352.719800] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1352.720119] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6a839516-761c-496c-b91a-174defdd765f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.726165] env[61978]: DEBUG oslo_vmware.rw_handles [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lease: (returnval){ [ 1352.726165] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]520a054a-34af-a045-558b-b7c9682d1bd3" [ 1352.726165] env[61978]: _type = "HttpNfcLease" [ 1352.726165] env[61978]: } obtained for exporting VM: (result){ [ 1352.726165] env[61978]: value = "vm-296050" [ 1352.726165] env[61978]: _type = "VirtualMachine" [ 1352.726165] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1352.726453] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the lease: (returnval){ [ 1352.726453] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]520a054a-34af-a045-558b-b7c9682d1bd3" [ 1352.726453] env[61978]: _type = "HttpNfcLease" [ 1352.726453] env[61978]: } to be ready. {{(pid=61978) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1352.732128] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1352.732128] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]520a054a-34af-a045-558b-b7c9682d1bd3" [ 1352.732128] env[61978]: _type = "HttpNfcLease" [ 1352.732128] env[61978]: } is initializing. 
{{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1352.968292] env[61978]: DEBUG nova.network.neutron [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Updating instance_info_cache with network_info: [{"id": "7cefaef7-7dfd-4081-8872-bbdb8d201973", "address": "fa:16:3e:40:2b:80", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cefaef7-7d", "ovs_interfaceid": "7cefaef7-7dfd-4081-8872-bbdb8d201973", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ec006995-1071-4f27-8726-d161611d8e5b", "address": "fa:16:3e:27:98:41", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec006995-10", "ovs_interfaceid": "ec006995-1071-4f27-8726-d161611d8e5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1353.016718] env[61978]: DEBUG oslo_concurrency.lockutils [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "refresh_cache-ac1676dd-affa-49cd-9e7b-a301abcec232" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1353.027510] env[61978]: DEBUG oslo_vmware.api [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1395999, 'name': 
PowerOffVM_Task, 'duration_secs': 0.213651} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.028262] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1353.028468] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1353.028713] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fa083adc-38e9-45d2-b649-5d78ff91c67a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.034633] env[61978]: DEBUG oslo_concurrency.lockutils [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.034874] env[61978]: DEBUG oslo_concurrency.lockutils [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.035100] env[61978]: DEBUG nova.objects.instance [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lazy-loading 'resources' on Instance uuid 001d90e9-9c22-4044-b550-d3acd778222e {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1353.092979] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1353.093165] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1353.093359] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Deleting the datastore file [datastore2] f33d00ec-72b7-43f2-bc0d-320e3219ae47 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1353.093623] env[61978]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e095638-6c60-4c4a-9c1e-6368cf142ed6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.100536] env[61978]: DEBUG oslo_vmware.api [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1353.100536] env[61978]: value = "task-1396004" [ 1353.100536] env[61978]: _type = "Task" [ 1353.100536] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.110403] env[61978]: DEBUG oslo_vmware.api [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1396004, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.202101] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1396001, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098258} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.202371] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1353.202561] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1353.202744] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1353.235055] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1353.235055] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]520a054a-34af-a045-558b-b7c9682d1bd3" [ 1353.235055] env[61978]: _type = "HttpNfcLease" [ 1353.235055] env[61978]: } is ready. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1353.235371] env[61978]: DEBUG oslo_vmware.rw_handles [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1353.235371] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]520a054a-34af-a045-558b-b7c9682d1bd3" [ 1353.235371] env[61978]: _type = "HttpNfcLease" [ 1353.235371] env[61978]: }. 
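
The image upload path above exports the linked-clone VM through an HttpNfcLease: the lease starts out initializing, is polled until it reports ready, and its device info then yields the VMDK URL that gets opened for reading. A compact sketch of that sequence; FakeLease, its fields, and the URL are stand-ins rather than the vSphere SDK objects:

    import itertools
    import time

    class FakeLease:
        """Stand-in lease that becomes ready after a couple of polls."""
        def __init__(self):
            self._states = itertools.chain(["initializing"] * 2,
                                           itertools.repeat("ready"))
            self.info = {"deviceUrl": [
                {"disk": True,
                 "url": "https://esx-host.example/nfc/52a44f03/disk-0.vmdk"}]}

        @property
        def state(self):
            return next(self._states)

    def wait_for_lease_ready(lease, poll_interval=0.1, timeout=10.0):
        deadline = time.monotonic() + timeout
        while True:
            state = lease.state
            print(f"Lease is {state}.")
            if state == "ready":
                return
            if state == "error" or time.monotonic() > deadline:
                raise RuntimeError("lease did not become ready")
            time.sleep(poll_interval)

    def find_vmdk_url(lease):
        """Return the first disk URL exposed by the ready lease."""
        return next(d["url"] for d in lease.info["deviceUrl"] if d.get("disk"))

    lease = FakeLease()
    wait_for_lease_ready(lease)
    print("Found VMDK URL:", find_vmdk_url(lease))
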
{{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1353.236091] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce94f5c-47d7-436e-8609-3bfd53eb92d9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.244218] env[61978]: DEBUG oslo_vmware.rw_handles [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a44f03-88ed-b286-fcd9-4314cfc6d831/disk-0.vmdk from lease info. {{(pid=61978) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1353.244413] env[61978]: DEBUG oslo_vmware.rw_handles [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a44f03-88ed-b286-fcd9-4314cfc6d831/disk-0.vmdk for reading. {{(pid=61978) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1353.332817] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-57cfda16-a3b3-4c05-95e5-7c6063a69a56 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.472337] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1353.472337] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1353.472337] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.472987] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-120eccb1-a109-447a-b4e3-dfb3a7128c11 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.490620] env[61978]: DEBUG nova.virt.hardware [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1353.490845] env[61978]: DEBUG nova.virt.hardware [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1353.491019] env[61978]: DEBUG nova.virt.hardware [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1353.491214] env[61978]: DEBUG nova.virt.hardware [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1353.491396] env[61978]: DEBUG nova.virt.hardware [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1353.491562] env[61978]: DEBUG nova.virt.hardware [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1353.491772] env[61978]: DEBUG nova.virt.hardware [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1353.491940] env[61978]: DEBUG nova.virt.hardware [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1353.492130] env[61978]: DEBUG nova.virt.hardware [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1353.492302] env[61978]: DEBUG nova.virt.hardware [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1353.492483] env[61978]: DEBUG nova.virt.hardware [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Sorted desired 
topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1353.498682] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Reconfiguring VM to attach interface {{(pid=61978) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1353.498993] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e502e41f-23bf-4a4d-bfe2-cd5f4f481b2f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.517333] env[61978]: DEBUG oslo_vmware.api [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1353.517333] env[61978]: value = "task-1396005" [ 1353.517333] env[61978]: _type = "Task" [ 1353.517333] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.528542] env[61978]: DEBUG oslo_vmware.api [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396005, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.556576] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1353.556745] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Cleaning up deleted instances with incomplete migration {{(pid=61978) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 1353.607519] env[61978]: DEBUG nova.compute.manager [req-62d007f9-1e04-439f-9a2f-436f7276558a req-ac4d0b7c-665c-4947-afab-e05e077e9133 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Received event network-vif-plugged-ec006995-1071-4f27-8726-d161611d8e5b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1353.608421] env[61978]: DEBUG oslo_concurrency.lockutils [req-62d007f9-1e04-439f-9a2f-436f7276558a req-ac4d0b7c-665c-4947-afab-e05e077e9133 service nova] Acquiring lock "b4541d84-b4c3-4441-b5a7-90de2dac3562-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.608421] env[61978]: DEBUG oslo_concurrency.lockutils [req-62d007f9-1e04-439f-9a2f-436f7276558a req-ac4d0b7c-665c-4947-afab-e05e077e9133 service nova] Lock "b4541d84-b4c3-4441-b5a7-90de2dac3562-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.608421] env[61978]: DEBUG oslo_concurrency.lockutils [req-62d007f9-1e04-439f-9a2f-436f7276558a req-ac4d0b7c-665c-4947-afab-e05e077e9133 service nova] Lock 
"b4541d84-b4c3-4441-b5a7-90de2dac3562-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.608421] env[61978]: DEBUG nova.compute.manager [req-62d007f9-1e04-439f-9a2f-436f7276558a req-ac4d0b7c-665c-4947-afab-e05e077e9133 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] No waiting events found dispatching network-vif-plugged-ec006995-1071-4f27-8726-d161611d8e5b {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1353.608702] env[61978]: WARNING nova.compute.manager [req-62d007f9-1e04-439f-9a2f-436f7276558a req-ac4d0b7c-665c-4947-afab-e05e077e9133 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Received unexpected event network-vif-plugged-ec006995-1071-4f27-8726-d161611d8e5b for instance with vm_state active and task_state None. [ 1353.608780] env[61978]: DEBUG nova.compute.manager [req-62d007f9-1e04-439f-9a2f-436f7276558a req-ac4d0b7c-665c-4947-afab-e05e077e9133 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Received event network-changed-ec006995-1071-4f27-8726-d161611d8e5b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1353.608942] env[61978]: DEBUG nova.compute.manager [req-62d007f9-1e04-439f-9a2f-436f7276558a req-ac4d0b7c-665c-4947-afab-e05e077e9133 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Refreshing instance network info cache due to event network-changed-ec006995-1071-4f27-8726-d161611d8e5b. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1353.609145] env[61978]: DEBUG oslo_concurrency.lockutils [req-62d007f9-1e04-439f-9a2f-436f7276558a req-ac4d0b7c-665c-4947-afab-e05e077e9133 service nova] Acquiring lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1353.609286] env[61978]: DEBUG oslo_concurrency.lockutils [req-62d007f9-1e04-439f-9a2f-436f7276558a req-ac4d0b7c-665c-4947-afab-e05e077e9133 service nova] Acquired lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.609447] env[61978]: DEBUG nova.network.neutron [req-62d007f9-1e04-439f-9a2f-436f7276558a req-ac4d0b7c-665c-4947-afab-e05e077e9133 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Refreshing network info cache for port ec006995-1071-4f27-8726-d161611d8e5b {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1353.619469] env[61978]: DEBUG oslo_vmware.api [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1396004, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139238} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.620287] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1353.620475] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1353.620665] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1353.620860] env[61978]: INFO nova.compute.manager [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1353.621123] env[61978]: DEBUG oslo.service.loopingcall [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1353.621324] env[61978]: DEBUG nova.compute.manager [-] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1353.621439] env[61978]: DEBUG nova.network.neutron [-] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1353.684849] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a549cf1-338c-4ff4-8400-d56bfd6f2c61 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.693561] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0266caa5-f7bb-4e6b-b91a-2b2b3bc835be {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.732602] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c5efb4-66e7-4998-8100-3ef2478217dc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.741064] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2839940-327b-4337-8df3-cb4319f01753 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.758628] env[61978]: DEBUG nova.compute.provider_tree [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1354.030955] env[61978]: DEBUG oslo_vmware.api [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396005, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.257756] env[61978]: DEBUG nova.virt.hardware [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1354.258130] env[61978]: DEBUG nova.virt.hardware [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1354.258611] env[61978]: DEBUG nova.virt.hardware [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1354.258924] env[61978]: DEBUG nova.virt.hardware [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1354.260126] env[61978]: DEBUG nova.virt.hardware [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1354.260126] env[61978]: DEBUG nova.virt.hardware [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1354.260126] env[61978]: DEBUG nova.virt.hardware [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1354.260391] env[61978]: DEBUG nova.virt.hardware [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1354.260445] env[61978]: DEBUG nova.virt.hardware [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 
tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1354.260728] env[61978]: DEBUG nova.virt.hardware [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1354.260949] env[61978]: DEBUG nova.virt.hardware [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1354.261889] env[61978]: DEBUG nova.scheduler.client.report [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1354.265871] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e022204-6e9e-4c00-80fa-6de1336dcaf8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.277453] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2e4d324-242b-483d-9b4e-e1a5ee59b15f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.292370] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Instance VIF info [] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1354.298588] env[61978]: DEBUG oslo.service.loopingcall [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1354.299294] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1354.299701] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eafcc956-6f14-4d0a-bae2-bb191ec4766b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.317209] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1354.317209] env[61978]: value = "task-1396006" [ 1354.317209] env[61978]: _type = "Task" [ 1354.317209] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.327330] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396006, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.358043] env[61978]: DEBUG nova.network.neutron [req-62d007f9-1e04-439f-9a2f-436f7276558a req-ac4d0b7c-665c-4947-afab-e05e077e9133 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Updated VIF entry in instance network info cache for port ec006995-1071-4f27-8726-d161611d8e5b. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1354.358682] env[61978]: DEBUG nova.network.neutron [req-62d007f9-1e04-439f-9a2f-436f7276558a req-ac4d0b7c-665c-4947-afab-e05e077e9133 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Updating instance_info_cache with network_info: [{"id": "7cefaef7-7dfd-4081-8872-bbdb8d201973", "address": "fa:16:3e:40:2b:80", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cefaef7-7d", "ovs_interfaceid": "7cefaef7-7dfd-4081-8872-bbdb8d201973", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ec006995-1071-4f27-8726-d161611d8e5b", "address": "fa:16:3e:27:98:41", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec006995-10", "ovs_interfaceid": "ec006995-1071-4f27-8726-d161611d8e5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.424722] env[61978]: DEBUG nova.network.neutron [-] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.528944] env[61978]: DEBUG oslo_vmware.api [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396005, 'name': ReconfigVM_Task, 'duration_secs': 0.968832} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.529637] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1354.529970] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Reconfigured VM to attach interface {{(pid=61978) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1354.537325] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1c6b36-39a4-407f-8b3c-5faea4734ec7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.557374] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Updating instance 'ac1676dd-affa-49cd-9e7b-a301abcec232' progress to 0 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1354.770156] env[61978]: DEBUG oslo_concurrency.lockutils [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.735s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1354.795537] env[61978]: INFO nova.scheduler.client.report [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Deleted allocations for instance 001d90e9-9c22-4044-b550-d3acd778222e [ 1354.831027] 
env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396006, 'name': CreateVM_Task, 'duration_secs': 0.30981} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.831027] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1354.831521] env[61978]: DEBUG oslo_concurrency.lockutils [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1354.831776] env[61978]: DEBUG oslo_concurrency.lockutils [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.832184] env[61978]: DEBUG oslo_concurrency.lockutils [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1354.832495] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f06af27-db47-4672-938d-7f22ca3bedcc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.837433] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for the task: (returnval){ [ 1354.837433] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5212df98-9541-d44a-0b6b-830c72f8ab35" [ 1354.837433] env[61978]: _type = "Task" [ 1354.837433] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.845412] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5212df98-9541-d44a-0b6b-830c72f8ab35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.861735] env[61978]: DEBUG oslo_concurrency.lockutils [req-62d007f9-1e04-439f-9a2f-436f7276558a req-ac4d0b7c-665c-4947-afab-e05e077e9133 service nova] Releasing lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1354.927822] env[61978]: INFO nova.compute.manager [-] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Took 1.31 seconds to deallocate network for instance. 
[ 1355.037188] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4aa4881f-e234-465a-b06b-47f04c600247 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "interface-b4541d84-b4c3-4441-b5a7-90de2dac3562-ec006995-1071-4f27-8726-d161611d8e5b" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.363s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.066157] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1355.066157] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-967cfd21-7032-4501-9149-2ea86d171ad5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.071300] env[61978]: DEBUG oslo_vmware.api [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1355.071300] env[61978]: value = "task-1396007" [ 1355.071300] env[61978]: _type = "Task" [ 1355.071300] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.080185] env[61978]: DEBUG oslo_vmware.api [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396007, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.306096] env[61978]: DEBUG oslo_concurrency.lockutils [None req-35b159ca-f26b-40c2-af59-210670aeec8a tempest-ServerDiskConfigTestJSON-349209736 tempest-ServerDiskConfigTestJSON-349209736-project-member] Lock "001d90e9-9c22-4044-b550-d3acd778222e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.232s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.347799] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5212df98-9541-d44a-0b6b-830c72f8ab35, 'name': SearchDatastore_Task, 'duration_secs': 0.01852} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.348226] env[61978]: DEBUG oslo_concurrency.lockutils [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1355.348536] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1355.349066] env[61978]: DEBUG oslo_concurrency.lockutils [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1355.349066] env[61978]: DEBUG oslo_concurrency.lockutils [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1355.349218] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1355.349636] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1d1b0c1-5e98-4619-b6b8-86f570891d44 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.358879] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1355.359116] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1355.360222] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b32eed72-ba52-4ef5-a780-1b3f321eddc3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.365284] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for the task: (returnval){ [ 1355.365284] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]529a3212-124a-07d2-4870-f42c61b57be9" [ 1355.365284] env[61978]: _type = "Task" [ 1355.365284] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.372696] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]529a3212-124a-07d2-4870-f42c61b57be9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.434617] env[61978]: DEBUG oslo_concurrency.lockutils [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1355.434916] env[61978]: DEBUG oslo_concurrency.lockutils [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.435176] env[61978]: DEBUG nova.objects.instance [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lazy-loading 'resources' on Instance uuid f33d00ec-72b7-43f2-bc0d-320e3219ae47 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1355.583682] env[61978]: DEBUG oslo_vmware.api [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396007, 'name': PowerOffVM_Task, 'duration_secs': 0.338446} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.584585] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1355.584585] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Updating instance 'ac1676dd-affa-49cd-9e7b-a301abcec232' progress to 17 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1355.691179] env[61978]: DEBUG nova.compute.manager [req-ec497f7b-70bf-437a-842c-6b7e986d3d25 req-e690c43e-e5fb-4a0d-b416-67030f793cf5 service nova] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Received event network-vif-deleted-cb02d7c2-d091-4929-a5bd-80c484b81de0 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1355.878014] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]529a3212-124a-07d2-4870-f42c61b57be9, 'name': SearchDatastore_Task, 'duration_secs': 0.011701} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.879454] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1d0c15f-63c1-4e12-b97b-e339a841dce1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.885281] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for the task: (returnval){ [ 1355.885281] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52bb309e-334b-7da6-dc2d-37bdb33c544b" [ 1355.885281] env[61978]: _type = "Task" [ 1355.885281] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.893949] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52bb309e-334b-7da6-dc2d-37bdb33c544b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.066891] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69367222-96f9-48eb-8f39-6e2d0ff80a8f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.074657] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9044ee-6f84-4305-b227-959f74a89c8f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.107183] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:05Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1356.111157] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1356.111157] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1356.111157] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1356.111157] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1356.111157] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1356.111157] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
1356.111353] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1356.111518] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1356.111720] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1356.111856] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1356.117370] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30fc7ab9-fdc3-4838-88bb-9f2d77a5351e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.128695] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9bf9e7-ece4-4d13-a807-c1a79626f675 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.140036] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf541a5-6f4e-4e84-8345-89ffd508374d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.144039] env[61978]: DEBUG oslo_vmware.api [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1356.144039] env[61978]: value = "task-1396008" [ 1356.144039] env[61978]: _type = "Task" [ 1356.144039] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.158433] env[61978]: DEBUG nova.compute.provider_tree [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1356.165210] env[61978]: DEBUG oslo_vmware.api [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396008, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.368306] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "interface-b4541d84-b4c3-4441-b5a7-90de2dac3562-ec006995-1071-4f27-8726-d161611d8e5b" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.368631] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "interface-b4541d84-b4c3-4441-b5a7-90de2dac3562-ec006995-1071-4f27-8726-d161611d8e5b" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1356.397038] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52bb309e-334b-7da6-dc2d-37bdb33c544b, 'name': SearchDatastore_Task, 'duration_secs': 0.01293} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.397171] env[61978]: DEBUG oslo_concurrency.lockutils [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1356.397508] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] c27f7dd1-bdb0-450b-a58e-fe9afafdb368/c27f7dd1-bdb0-450b-a58e-fe9afafdb368.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1356.397915] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b2611ffb-fac9-409d-9360-7c6b0938d602 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.406626] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for the task: (returnval){ [ 1356.406626] env[61978]: value = "task-1396009" [ 1356.406626] env[61978]: _type = "Task" [ 1356.406626] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.418549] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1396009, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.655666] env[61978]: DEBUG oslo_vmware.api [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396008, 'name': ReconfigVM_Task, 'duration_secs': 0.177182} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.656084] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Updating instance 'ac1676dd-affa-49cd-9e7b-a301abcec232' progress to 33 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1356.661598] env[61978]: DEBUG nova.scheduler.client.report [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1356.873044] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1356.873044] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1356.873832] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600b1748-30ea-4cb2-8c35-71ac8eaf12dd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.892865] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1168ca9d-1324-4701-95d3-2133b38f8988 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.921149] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Reconfiguring VM to detach interface {{(pid=61978) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1356.926808] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc82f858-ff08-488c-b68d-54280d8a092a {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.950130] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1396009, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.951680] env[61978]: DEBUG oslo_vmware.api [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1356.951680] env[61978]: value = "task-1396010" [ 1356.951680] env[61978]: _type = "Task" [ 1356.951680] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.963420] env[61978]: DEBUG oslo_vmware.api [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396010, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.164323] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1357.164323] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1357.164323] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1357.164635] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1357.164675] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1357.164815] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 
tempest-ServerActionsTestJSON-798658854-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1357.165059] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1357.165279] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1357.165451] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1357.165622] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1357.166133] env[61978]: DEBUG nova.virt.hardware [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1357.171671] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Reconfiguring VM instance instance-00000067 to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1357.172523] env[61978]: DEBUG oslo_concurrency.lockutils [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.738s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1357.174807] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-020a038c-68d8-411f-b8ca-fc796f57c9d0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.194099] env[61978]: DEBUG oslo_vmware.api [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1357.194099] env[61978]: value = "task-1396011" [ 1357.194099] env[61978]: _type = "Task" [ 1357.194099] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.202552] env[61978]: DEBUG oslo_vmware.api [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396011, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.215026] env[61978]: INFO nova.scheduler.client.report [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Deleted allocations for instance f33d00ec-72b7-43f2-bc0d-320e3219ae47 [ 1357.423480] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1396009, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.599274} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.423794] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] c27f7dd1-bdb0-450b-a58e-fe9afafdb368/c27f7dd1-bdb0-450b-a58e-fe9afafdb368.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1357.424038] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1357.424320] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-70dec7d9-ebee-48ad-ae64-7ae91fd91f34 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.430874] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for the task: (returnval){ [ 1357.430874] env[61978]: value = "task-1396012" [ 1357.430874] env[61978]: _type = "Task" [ 1357.430874] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.438895] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1396012, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.461355] env[61978]: DEBUG oslo_vmware.api [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396010, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.704565] env[61978]: DEBUG oslo_vmware.api [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396011, 'name': ReconfigVM_Task, 'duration_secs': 0.164329} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.704755] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Reconfigured VM instance instance-00000067 to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1357.705537] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f745c64-7820-4600-807f-9e91f82d05e8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.728860] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] ac1676dd-affa-49cd-9e7b-a301abcec232/ac1676dd-affa-49cd-9e7b-a301abcec232.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1357.731378] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-011eb4af-2cc1-47d1-b3c7-9ccb6647f45d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.744359] env[61978]: DEBUG oslo_concurrency.lockutils [None req-99ef6ced-625d-4220-be6b-bf103263f8ce tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "f33d00ec-72b7-43f2-bc0d-320e3219ae47" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.247s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1357.750288] env[61978]: DEBUG oslo_vmware.api [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1357.750288] env[61978]: value = "task-1396014" [ 1357.750288] env[61978]: _type = "Task" [ 1357.750288] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.758381] env[61978]: DEBUG oslo_vmware.api [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396014, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.941108] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1396012, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078938} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.941510] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1357.942294] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ecee30d-75cd-41db-beab-913744287afd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.962307] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] c27f7dd1-bdb0-450b-a58e-fe9afafdb368/c27f7dd1-bdb0-450b-a58e-fe9afafdb368.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1357.967540] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2814990d-dfca-4cda-bf6b-10e6833d1e70 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.987471] env[61978]: DEBUG oslo_vmware.api [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396010, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.988830] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for the task: (returnval){ [ 1357.988830] env[61978]: value = "task-1396015" [ 1357.988830] env[61978]: _type = "Task" [ 1357.988830] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.996937] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1396015, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.056272] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1358.260147] env[61978]: DEBUG oslo_vmware.api [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396014, 'name': ReconfigVM_Task, 'duration_secs': 0.419336} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.260486] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Reconfigured VM instance instance-00000067 to attach disk [datastore2] ac1676dd-affa-49cd-9e7b-a301abcec232/ac1676dd-affa-49cd-9e7b-a301abcec232.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1358.260772] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Updating instance 'ac1676dd-affa-49cd-9e7b-a301abcec232' progress to 50 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1358.465085] env[61978]: DEBUG oslo_vmware.api [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396010, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.498386] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1396015, 'name': ReconfigVM_Task, 'duration_secs': 0.301828} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.498735] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Reconfigured VM instance instance-0000006c to attach disk [datastore2] c27f7dd1-bdb0-450b-a58e-fe9afafdb368/c27f7dd1-bdb0-450b-a58e-fe9afafdb368.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1358.499378] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d0e6dde8-a420-41b3-b34f-2299a7db3d35 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.505204] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for the task: (returnval){ [ 1358.505204] env[61978]: value = "task-1396016" [ 1358.505204] env[61978]: _type = "Task" [ 1358.505204] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.512928] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1396016, 'name': Rename_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.767821] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9645aed2-318c-4ad7-b72d-e51bc3a4635a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.788714] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-454006f0-562a-4ab7-970d-5d4221734f2f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.806603] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Updating instance 'ac1676dd-affa-49cd-9e7b-a301abcec232' progress to 67 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1358.965699] env[61978]: DEBUG oslo_vmware.api [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396010, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.015547] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1396016, 'name': Rename_Task, 'duration_secs': 0.142804} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.015868] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1359.016161] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a349e2c3-9f8c-4123-a942-411d0ebffe17 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.023321] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for the task: (returnval){ [ 1359.023321] env[61978]: value = "task-1396017" [ 1359.023321] env[61978]: _type = "Task" [ 1359.023321] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.031122] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1396017, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.350273] env[61978]: DEBUG nova.network.neutron [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Port f8cf63ba-ee62-4a3a-85e0-87d88ff84665 binding to destination host cpu-1 is already ACTIVE {{(pid=61978) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1359.467220] env[61978]: DEBUG oslo_vmware.api [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396010, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.533528] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1396017, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.556511] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1359.966140] env[61978]: DEBUG oslo_vmware.api [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396010, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.034510] env[61978]: DEBUG oslo_vmware.api [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1396017, 'name': PowerOnVM_Task, 'duration_secs': 0.577651} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.034855] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1360.035113] env[61978]: DEBUG nova.compute.manager [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1360.036741] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7704a9d3-73b1-41d9-84b1-ffb76f4bf5ba {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.372875] env[61978]: DEBUG oslo_concurrency.lockutils [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "ac1676dd-affa-49cd-9e7b-a301abcec232-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1360.373083] env[61978]: DEBUG oslo_concurrency.lockutils [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "ac1676dd-affa-49cd-9e7b-a301abcec232-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1360.373241] env[61978]: DEBUG oslo_concurrency.lockutils [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "ac1676dd-affa-49cd-9e7b-a301abcec232-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1360.466788] env[61978]: DEBUG oslo_vmware.api [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396010, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.553481] env[61978]: DEBUG oslo_concurrency.lockutils [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1360.553729] env[61978]: DEBUG oslo_concurrency.lockutils [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1360.553913] env[61978]: DEBUG nova.objects.instance [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61978) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1360.965845] env[61978]: DEBUG oslo_vmware.api [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396010, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.991664] env[61978]: DEBUG oslo_vmware.rw_handles [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a44f03-88ed-b286-fcd9-4314cfc6d831/disk-0.vmdk. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1360.992217] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14aa0928-eb1b-4eee-b7fb-3787423c8d61 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.997989] env[61978]: DEBUG oslo_vmware.rw_handles [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a44f03-88ed-b286-fcd9-4314cfc6d831/disk-0.vmdk is in state: ready. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1360.998170] env[61978]: ERROR oslo_vmware.rw_handles [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a44f03-88ed-b286-fcd9-4314cfc6d831/disk-0.vmdk due to incomplete transfer. 
[ 1360.998451] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a244e8b1-a055-4473-bbfa-5e84bd5ef5d3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.005912] env[61978]: DEBUG oslo_vmware.rw_handles [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a44f03-88ed-b286-fcd9-4314cfc6d831/disk-0.vmdk. {{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1361.006125] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Uploaded image c3d6fb3f-8d33-4f48-a74c-692608eb4ac9 to the Glance image server {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1361.008484] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Destroying the VM {{(pid=61978) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1361.008719] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2a402b09-fee9-496f-a74a-a9936ab6e72d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.014485] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1361.014485] env[61978]: value = "task-1396018" [ 1361.014485] env[61978]: _type = "Task" [ 1361.014485] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.024584] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396018, 'name': Destroy_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.183699] env[61978]: DEBUG oslo_concurrency.lockutils [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "243e7146-46fc-43f4-a83b-cdc58f397f9e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.183699] env[61978]: DEBUG oslo_concurrency.lockutils [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "243e7146-46fc-43f4-a83b-cdc58f397f9e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.183951] env[61978]: DEBUG oslo_concurrency.lockutils [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "243e7146-46fc-43f4-a83b-cdc58f397f9e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.185011] env[61978]: DEBUG oslo_concurrency.lockutils [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "243e7146-46fc-43f4-a83b-cdc58f397f9e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.185011] env[61978]: DEBUG oslo_concurrency.lockutils [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "243e7146-46fc-43f4-a83b-cdc58f397f9e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.186468] env[61978]: INFO nova.compute.manager [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Terminating instance [ 1361.188425] env[61978]: DEBUG nova.compute.manager [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1361.189365] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1361.190242] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90fadc83-ace6-4f15-a667-74dc77187a95 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.197380] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1361.197624] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2547f915-8657-475b-9255-56693ad5e8cb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.203781] env[61978]: DEBUG oslo_vmware.api [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1361.203781] env[61978]: value = "task-1396019" [ 1361.203781] env[61978]: _type = "Task" [ 1361.203781] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.212131] env[61978]: DEBUG oslo_vmware.api [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1396019, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.409169] env[61978]: DEBUG oslo_concurrency.lockutils [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "refresh_cache-ac1676dd-affa-49cd-9e7b-a301abcec232" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1361.409382] env[61978]: DEBUG oslo_concurrency.lockutils [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "refresh_cache-ac1676dd-affa-49cd-9e7b-a301abcec232" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.409580] env[61978]: DEBUG nova.network.neutron [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1361.467903] env[61978]: DEBUG oslo_vmware.api [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396010, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.525193] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396018, 'name': Destroy_Task, 'duration_secs': 0.499134} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.525548] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Destroyed the VM [ 1361.525850] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Deleting Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1361.526175] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-801727c1-1d44-4b0e-88f5-cbf15a05d9c7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.533729] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1361.533729] env[61978]: value = "task-1396020" [ 1361.533729] env[61978]: _type = "Task" [ 1361.533729] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.541594] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396020, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.561277] env[61978]: DEBUG oslo_concurrency.lockutils [None req-497f75ac-b0ba-46c3-9518-20e15224d2a2 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.007s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.601183] env[61978]: DEBUG oslo_concurrency.lockutils [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Acquiring lock "c27f7dd1-bdb0-450b-a58e-fe9afafdb368" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.601461] env[61978]: DEBUG oslo_concurrency.lockutils [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Lock "c27f7dd1-bdb0-450b-a58e-fe9afafdb368" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.601680] env[61978]: DEBUG oslo_concurrency.lockutils [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Acquiring lock "c27f7dd1-bdb0-450b-a58e-fe9afafdb368-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.601876] env[61978]: DEBUG oslo_concurrency.lockutils [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Lock "c27f7dd1-bdb0-450b-a58e-fe9afafdb368-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.602057] env[61978]: DEBUG oslo_concurrency.lockutils [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Lock "c27f7dd1-bdb0-450b-a58e-fe9afafdb368-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.604263] env[61978]: INFO nova.compute.manager [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Terminating instance [ 1361.605922] env[61978]: DEBUG oslo_concurrency.lockutils [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Acquiring lock "refresh_cache-c27f7dd1-bdb0-450b-a58e-fe9afafdb368" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1361.606104] env[61978]: DEBUG oslo_concurrency.lockutils [None req-126a9405-c05c-43f3-838d-0486112e7d78 
tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Acquired lock "refresh_cache-c27f7dd1-bdb0-450b-a58e-fe9afafdb368" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.606279] env[61978]: DEBUG nova.network.neutron [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1361.713543] env[61978]: DEBUG oslo_vmware.api [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1396019, 'name': PowerOffVM_Task, 'duration_secs': 0.362894} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.713763] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1361.713938] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1361.714219] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ec39001b-beb2-41c8-8be7-23b7c0f85e2c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.778173] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1361.778515] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1361.778746] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Deleting the datastore file [datastore1] 243e7146-46fc-43f4-a83b-cdc58f397f9e {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1361.779043] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf11af4a-09a2-42f5-a285-997e69bb34d9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.785311] env[61978]: DEBUG oslo_vmware.api [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 
tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for the task: (returnval){ [ 1361.785311] env[61978]: value = "task-1396022" [ 1361.785311] env[61978]: _type = "Task" [ 1361.785311] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.793244] env[61978]: DEBUG oslo_vmware.api [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1396022, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.966847] env[61978]: DEBUG oslo_vmware.api [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396010, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.044155] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396020, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.128713] env[61978]: DEBUG nova.network.neutron [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1362.177768] env[61978]: DEBUG nova.network.neutron [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Updating instance_info_cache with network_info: [{"id": "f8cf63ba-ee62-4a3a-85e0-87d88ff84665", "address": "fa:16:3e:dd:50:05", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8cf63ba-ee", "ovs_interfaceid": "f8cf63ba-ee62-4a3a-85e0-87d88ff84665", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1362.200142] env[61978]: DEBUG nova.network.neutron [None req-126a9405-c05c-43f3-838d-0486112e7d78 
tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1362.294947] env[61978]: DEBUG oslo_vmware.api [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Task: {'id': task-1396022, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.250527} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.295263] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1362.295455] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1362.295642] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1362.295829] env[61978]: INFO nova.compute.manager [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1362.296098] env[61978]: DEBUG oslo.service.loopingcall [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1362.296308] env[61978]: DEBUG nova.compute.manager [-] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1362.296407] env[61978]: DEBUG nova.network.neutron [-] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1362.467699] env[61978]: DEBUG oslo_vmware.api [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396010, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.544107] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396020, 'name': RemoveSnapshot_Task, 'duration_secs': 0.685684} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.544418] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Deleted Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1362.544752] env[61978]: DEBUG nova.compute.manager [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1362.546172] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11d53eaa-400a-4c0a-8f69-fee485b6f0ed {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.557798] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1362.557798] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1362.560189] env[61978]: DEBUG nova.compute.manager [req-8d4ad90a-2d39-40e1-abb8-97202dba0047 req-5ad51518-9a05-4054-a951-3f9953953cd0 service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Received event network-vif-deleted-2daa968c-ac9c-4f15-ad2b-7977f5581ef1 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1362.560420] env[61978]: INFO nova.compute.manager [req-8d4ad90a-2d39-40e1-abb8-97202dba0047 req-5ad51518-9a05-4054-a951-3f9953953cd0 service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Neutron deleted interface 2daa968c-ac9c-4f15-ad2b-7977f5581ef1; detaching it from the instance and deleting it from the info cache [ 1362.560647] env[61978]: DEBUG nova.network.neutron [req-8d4ad90a-2d39-40e1-abb8-97202dba0047 req-5ad51518-9a05-4054-a951-3f9953953cd0 service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1362.680486] env[61978]: DEBUG oslo_concurrency.lockutils [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "refresh_cache-ac1676dd-affa-49cd-9e7b-a301abcec232" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1362.702806] env[61978]: DEBUG 
oslo_concurrency.lockutils [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Releasing lock "refresh_cache-c27f7dd1-bdb0-450b-a58e-fe9afafdb368" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1362.703314] env[61978]: DEBUG nova.compute.manager [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1362.703519] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1362.704516] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396c3530-2911-49ff-bff3-b85a52f972fd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.714718] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1362.714718] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dcf12db8-2024-4844-b487-20741d3f947b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.726248] env[61978]: DEBUG oslo_vmware.api [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for the task: (returnval){ [ 1362.726248] env[61978]: value = "task-1396023" [ 1362.726248] env[61978]: _type = "Task" [ 1362.726248] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.734798] env[61978]: DEBUG oslo_vmware.api [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1396023, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.969476] env[61978]: DEBUG oslo_vmware.api [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396010, 'name': ReconfigVM_Task, 'duration_secs': 5.793595} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.969739] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1362.969953] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Reconfigured VM to detach interface {{(pid=61978) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1363.034012] env[61978]: DEBUG nova.network.neutron [-] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1363.058439] env[61978]: INFO nova.compute.manager [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Shelve offloading [ 1363.060413] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1363.060660] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-acf00f72-d5c7-4251-acac-2680fffaed0f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.063109] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aff4414a-ddca-4cc2-b896-7b759fa4be8a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.070096] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1363.070096] env[61978]: value = "task-1396024" [ 1363.070096] env[61978]: _type = "Task" [ 1363.070096] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.076727] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34729136-ec44-421e-b8c8-9db357e0944a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.094864] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] VM already powered off {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1363.095090] env[61978]: DEBUG nova.compute.manager [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1363.095840] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baebaabc-509e-4a80-9dcb-14ef19544658 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.105638] env[61978]: DEBUG nova.compute.manager [req-8d4ad90a-2d39-40e1-abb8-97202dba0047 req-5ad51518-9a05-4054-a951-3f9953953cd0 service nova] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Detach interface failed, port_id=2daa968c-ac9c-4f15-ad2b-7977f5581ef1, reason: Instance 243e7146-46fc-43f4-a83b-cdc58f397f9e could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1363.109134] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "refresh_cache-fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1363.109307] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired lock "refresh_cache-fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1363.109481] env[61978]: DEBUG nova.network.neutron [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1363.204949] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef46367-ffbe-403c-ba85-72d8996e7b6c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.223963] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf009eb8-903f-469d-b532-548c461509a3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.233637] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None 
req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Updating instance 'ac1676dd-affa-49cd-9e7b-a301abcec232' progress to 83 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1363.239786] env[61978]: DEBUG oslo_vmware.api [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1396023, 'name': PowerOffVM_Task, 'duration_secs': 0.385269} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.240286] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1363.240467] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1363.240714] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-173d92b7-04a8-4794-ac84-c21520b36cc6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.267818] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1363.268111] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1363.268182] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Deleting the datastore file [datastore2] c27f7dd1-bdb0-450b-a58e-fe9afafdb368 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1363.268478] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-822c7079-220b-4a09-b638-ebe6043a2877 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.275930] env[61978]: DEBUG oslo_vmware.api [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for the task: (returnval){ [ 1363.275930] env[61978]: value = "task-1396026" [ 1363.275930] env[61978]: _type = "Task" [ 1363.275930] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.284524] env[61978]: DEBUG oslo_vmware.api [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1396026, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.536722] env[61978]: INFO nova.compute.manager [-] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Took 1.24 seconds to deallocate network for instance. [ 1363.556659] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1363.556801] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1363.742690] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1363.742989] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe0240dd-1dcb-459b-8895-8866a24eee1c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.750469] env[61978]: DEBUG oslo_vmware.api [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1363.750469] env[61978]: value = "task-1396027" [ 1363.750469] env[61978]: _type = "Task" [ 1363.750469] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.759637] env[61978]: DEBUG oslo_vmware.api [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396027, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.784866] env[61978]: DEBUG oslo_vmware.api [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Task: {'id': task-1396026, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090097} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.787779] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1363.787960] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1363.788179] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1363.788415] env[61978]: INFO nova.compute.manager [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1363.788693] env[61978]: DEBUG oslo.service.loopingcall [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1363.788929] env[61978]: DEBUG nova.compute.manager [-] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1363.789058] env[61978]: DEBUG nova.network.neutron [-] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1363.811162] env[61978]: DEBUG nova.network.neutron [-] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1363.868616] env[61978]: DEBUG nova.network.neutron [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Updating instance_info_cache with network_info: [{"id": "2a16d335-2f9e-47f7-a83c-44777d05ca3b", "address": "fa:16:3e:e2:18:65", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a16d335-2f", "ovs_interfaceid": "2a16d335-2f9e-47f7-a83c-44777d05ca3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.044185] env[61978]: DEBUG oslo_concurrency.lockutils [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.044453] env[61978]: DEBUG oslo_concurrency.lockutils [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.044684] env[61978]: DEBUG nova.objects.instance [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lazy-loading 'resources' on Instance uuid 243e7146-46fc-43f4-a83b-cdc58f397f9e {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1364.262692] env[61978]: DEBUG oslo_vmware.api [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396027, 'name': PowerOnVM_Task, 'duration_secs': 0.428159} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.263463] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1364.263666] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-326697b1-72b3-4e2c-b73a-4d1282e1aa6e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Updating instance 'ac1676dd-affa-49cd-9e7b-a301abcec232' progress to 100 {{(pid=61978) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1364.314701] env[61978]: DEBUG nova.network.neutron [-] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.318033] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.318033] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquired lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.318033] env[61978]: DEBUG nova.network.neutron [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1364.371211] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lock "refresh_cache-fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1364.631616] env[61978]: DEBUG nova.compute.manager [req-6a8c5e76-a083-4f5f-8394-147e788c1927 req-7d8daf24-a6a2-4d82-b014-7fb6f5ca6925 service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Received event network-vif-unplugged-2a16d335-2f9e-47f7-a83c-44777d05ca3b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1364.631855] env[61978]: DEBUG oslo_concurrency.lockutils [req-6a8c5e76-a083-4f5f-8394-147e788c1927 req-7d8daf24-a6a2-4d82-b014-7fb6f5ca6925 service nova] Acquiring lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.632111] env[61978]: DEBUG oslo_concurrency.lockutils [req-6a8c5e76-a083-4f5f-8394-147e788c1927 req-7d8daf24-a6a2-4d82-b014-7fb6f5ca6925 service 
nova] Lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.632259] env[61978]: DEBUG oslo_concurrency.lockutils [req-6a8c5e76-a083-4f5f-8394-147e788c1927 req-7d8daf24-a6a2-4d82-b014-7fb6f5ca6925 service nova] Lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1364.632436] env[61978]: DEBUG nova.compute.manager [req-6a8c5e76-a083-4f5f-8394-147e788c1927 req-7d8daf24-a6a2-4d82-b014-7fb6f5ca6925 service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] No waiting events found dispatching network-vif-unplugged-2a16d335-2f9e-47f7-a83c-44777d05ca3b {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1364.632616] env[61978]: WARNING nova.compute.manager [req-6a8c5e76-a083-4f5f-8394-147e788c1927 req-7d8daf24-a6a2-4d82-b014-7fb6f5ca6925 service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Received unexpected event network-vif-unplugged-2a16d335-2f9e-47f7-a83c-44777d05ca3b for instance with vm_state shelved and task_state shelving_offloading. [ 1364.685378] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a7fdf40-5ab1-4169-8459-d9b788930aa2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.693062] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7026ccfb-d67f-48b9-982d-acdf7e75549d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.723633] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376be32b-0867-4f4f-bb31-789676c38e99 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.727340] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1364.728121] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ced8556-7003-4db6-a94e-cf2237dec19f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.733935] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-058c4dd3-deb1-404e-8fd1-95181de4d02d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.739300] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1364.739839] env[61978]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.UnregisterVM with opID=oslo.vmware-9d330be9-53bd-4f6d-bbea-757caa7cddea {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.749108] env[61978]: DEBUG nova.compute.provider_tree [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1364.757183] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "b4541d84-b4c3-4441-b5a7-90de2dac3562" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.757437] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "b4541d84-b4c3-4441-b5a7-90de2dac3562" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.757648] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "b4541d84-b4c3-4441-b5a7-90de2dac3562-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.757839] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "b4541d84-b4c3-4441-b5a7-90de2dac3562-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.758015] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "b4541d84-b4c3-4441-b5a7-90de2dac3562-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1364.760162] env[61978]: INFO nova.compute.manager [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Terminating instance [ 1364.762204] env[61978]: DEBUG nova.compute.manager [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1364.762417] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1364.763213] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc99263-087a-4356-a4d8-9cb94a5beaa7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.772933] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1364.773521] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9cb985e-d556-433d-952f-8685e9db59b8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.779789] env[61978]: DEBUG oslo_vmware.api [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1364.779789] env[61978]: value = "task-1396029" [ 1364.779789] env[61978]: _type = "Task" [ 1364.779789] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.790513] env[61978]: DEBUG oslo_vmware.api [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396029, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.808310] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1364.808559] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1364.808758] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Deleting the datastore file [datastore2] fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1364.809044] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cc1dbed8-3352-4239-9702-2343ab779d29 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.815535] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1364.815535] env[61978]: value = "task-1396030" [ 1364.815535] env[61978]: _type = "Task" [ 1364.815535] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.819358] env[61978]: INFO nova.compute.manager [-] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Took 1.03 seconds to deallocate network for instance. [ 1364.829142] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396030, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.062333] env[61978]: INFO nova.network.neutron [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Port ec006995-1071-4f27-8726-d161611d8e5b from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
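The PowerOffVM_Task / DeleteDatastoreFile_Task entries above follow oslo.vmware's invoke-then-poll pattern: the `Invoking VirtualMachine.PowerOffVM_Task` record comes from `invoke_api`, which returns a task moref immediately, and the `Task: {...} progress is N%` / `completed successfully` records come from `wait_for_task` polling it (api.py:397/434/444 in the log). A minimal sketch of that pattern, assuming oslo.vmware is installed and a vCenter is reachable; `VC_HOST`, `VC_USER`, `VC_PASS` and the VM lookup are placeholders, not values taken from this log:

```python
# Minimal sketch of the invoke-then-poll pattern seen in the log records above.
# VC_HOST / VC_USER / VC_PASS are placeholders; this is illustrative, not the
# Nova driver code that produced these entries.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'VC_HOST', 'VC_USER', 'VC_PASS',
    api_retry_count=10,       # retries for transient API faults
    task_poll_interval=0.5)   # seconds between "progress is N%" polls

# Grab some VM managed object reference (assumes at least one VM exists).
result = session.invoke_api(vim_util, 'get_objects',
                            session.vim, 'VirtualMachine', 100)
vm_ref = result.objects[0].obj

# *_Task calls return immediately with a task moref ...
task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

# ... and wait_for_task() polls it until SUCCESS, raising on error/cancel.
session.wait_for_task(task_ref)
session.logout()
```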
[ 1365.062850] env[61978]: DEBUG nova.network.neutron [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Updating instance_info_cache with network_info: [{"id": "7cefaef7-7dfd-4081-8872-bbdb8d201973", "address": "fa:16:3e:40:2b:80", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cefaef7-7d", "ovs_interfaceid": "7cefaef7-7dfd-4081-8872-bbdb8d201973", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.252714] env[61978]: DEBUG nova.scheduler.client.report [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1365.289834] env[61978]: DEBUG oslo_vmware.api [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396029, 'name': PowerOffVM_Task, 'duration_secs': 0.222938} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.290241] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1365.290347] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1365.290566] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-15878bc2-6c78-4d08-b73b-925c4846f295 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.325770] env[61978]: DEBUG oslo_vmware.api [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396030, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.272383} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.326796] env[61978]: DEBUG oslo_concurrency.lockutils [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1365.327085] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1365.327587] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1365.327587] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1365.349239] env[61978]: INFO nova.scheduler.client.report [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Deleted allocations for instance fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd [ 1365.373525] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Unregistered the VM {{(pid=61978) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1365.373525] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1365.373694] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Deleting the datastore file [datastore1] b4541d84-b4c3-4441-b5a7-90de2dac3562 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1365.373979] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-138ca3e3-4414-4160-bcb9-c7fdbf45fb7f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.380365] env[61978]: DEBUG oslo_vmware.api [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1365.380365] env[61978]: value = "task-1396032" [ 1365.380365] env[61978]: _type = "Task" [ 1365.380365] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.391638] env[61978]: DEBUG oslo_vmware.api [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396032, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.567677] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Releasing lock "refresh_cache-b4541d84-b4c3-4441-b5a7-90de2dac3562" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.601063] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1365.601227] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquired lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1365.601403] env[61978]: DEBUG nova.network.neutron [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Forcefully refreshing network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1365.759596] env[61978]: DEBUG oslo_concurrency.lockutils [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.715s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1365.762107] env[61978]: DEBUG oslo_concurrency.lockutils [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.435s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1365.762559] env[61978]: DEBUG nova.objects.instance [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Lazy-loading 'resources' on Instance uuid c27f7dd1-bdb0-450b-a58e-fe9afafdb368 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1365.778671] env[61978]: INFO nova.scheduler.client.report [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Deleted allocations for instance 243e7146-46fc-43f4-a83b-cdc58f397f9e [ 1365.854313] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1365.892246] env[61978]: DEBUG oslo_vmware.api [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396032, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.074809] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1c28bbd2-f2ba-4533-abad-5cdb17555288 tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "interface-b4541d84-b4c3-4441-b5a7-90de2dac3562-ec006995-1071-4f27-8726-d161611d8e5b" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.706s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.296433] env[61978]: DEBUG oslo_concurrency.lockutils [None req-782b8b43-19df-41c0-9b85-039c10054d43 tempest-ServerActionsTestOtherA-346668886 tempest-ServerActionsTestOtherA-346668886-project-member] Lock "243e7146-46fc-43f4-a83b-cdc58f397f9e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.112s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.386995] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3f9750b-3cad-490c-8849-32727d86fab7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.398091] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c02fce62-88ac-44f3-ba5d-1fe7f199caaf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.402027] env[61978]: DEBUG oslo_vmware.api [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396032, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.585534} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.402400] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1366.402713] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1366.402992] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1366.403479] env[61978]: INFO nova.compute.manager [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Took 1.64 seconds to destroy the instance on the hypervisor. 
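The recurring `Acquiring lock ... by ...` / `Lock ... acquired ... waited 0.000s` / `"released" ... held N.NNNs` records throughout this stretch are emitted by oslo.concurrency's named locks (lockutils.py:310/313/331 for the context manager, 402/407/421 for the decorator wrapper). A minimal sketch of the two idioms that produce them; the lock names and worker functions below are made up for illustration, while names like `compute_resources` and `refresh_cache-<uuid>` in the log belong to Nova itself:

```python
# Minimal sketch of the oslo.concurrency named-lock idioms behind the
# "Acquiring lock ..." / "acquired" / "released" DEBUG lines above.
# Lock names and functions here are illustrative placeholders.
from oslo_concurrency import lockutils

# Decorator form: serializes all callers sharing the same lock name and
# logs acquire/release against the decorated function's qualified name.
@lockutils.synchronized('compute_resources')
def update_usage():
    # critical section: mutate shared resource-tracker state
    pass

# Context-manager form: the per-instance "refresh_cache-<uuid>" style
# locks in the log are taken this way around cache refreshes.
def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        # critical section: rebuild the instance's network info cache
        pass

update_usage()
refresh_cache('00000000-0000-0000-0000-000000000000')
```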
[ 1366.403786] env[61978]: DEBUG oslo.service.loopingcall [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1366.404376] env[61978]: DEBUG nova.compute.manager [-] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1366.404481] env[61978]: DEBUG nova.network.neutron [-] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1366.440444] env[61978]: DEBUG nova.network.neutron [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Port f8cf63ba-ee62-4a3a-85e0-87d88ff84665 binding to destination host cpu-1 is already ACTIVE {{(pid=61978) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1366.440716] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "refresh_cache-ac1676dd-affa-49cd-9e7b-a301abcec232" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1366.440879] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "refresh_cache-ac1676dd-affa-49cd-9e7b-a301abcec232" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1366.441064] env[61978]: DEBUG nova.network.neutron [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1366.443555] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e683a3-988f-4e85-b760-efd29b70f4d2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.454643] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd677a5-71cf-4dfd-98e1-bcc55e82ccde {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.471047] env[61978]: DEBUG nova.compute.provider_tree [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1366.661974] env[61978]: DEBUG nova.compute.manager [req-c6853a7d-753f-4f2f-9e18-15e3587224fb req-28b84d24-1bbd-473a-82e8-a24d7bf5934c service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Received event 
network-changed-2a16d335-2f9e-47f7-a83c-44777d05ca3b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1366.662201] env[61978]: DEBUG nova.compute.manager [req-c6853a7d-753f-4f2f-9e18-15e3587224fb req-28b84d24-1bbd-473a-82e8-a24d7bf5934c service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Refreshing instance network info cache due to event network-changed-2a16d335-2f9e-47f7-a83c-44777d05ca3b. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1366.662435] env[61978]: DEBUG oslo_concurrency.lockutils [req-c6853a7d-753f-4f2f-9e18-15e3587224fb req-28b84d24-1bbd-473a-82e8-a24d7bf5934c service nova] Acquiring lock "refresh_cache-fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1366.662592] env[61978]: DEBUG oslo_concurrency.lockutils [req-c6853a7d-753f-4f2f-9e18-15e3587224fb req-28b84d24-1bbd-473a-82e8-a24d7bf5934c service nova] Acquired lock "refresh_cache-fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1366.662759] env[61978]: DEBUG nova.network.neutron [req-c6853a7d-753f-4f2f-9e18-15e3587224fb req-28b84d24-1bbd-473a-82e8-a24d7bf5934c service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Refreshing network info cache for port 2a16d335-2f9e-47f7-a83c-44777d05ca3b {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1366.922695] env[61978]: DEBUG nova.network.neutron [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Updating instance_info_cache with network_info: [{"id": "377707c6-c569-41b4-b460-d4ffd83a8c03", "address": "fa:16:3e:38:98:1d", "network": {"id": "3b4d30cc-d9a1-4180-b6eb-881241a1c0f4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-414049673-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3a4f29a959447159b2f7d194ea94873", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap377707c6-c5", "ovs_interfaceid": "377707c6-c569-41b4-b460-d4ffd83a8c03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1366.975286] env[61978]: DEBUG nova.scheduler.client.report [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1367.377204] env[61978]: DEBUG nova.network.neutron [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Updating instance_info_cache with network_info: [{"id": "f8cf63ba-ee62-4a3a-85e0-87d88ff84665", "address": "fa:16:3e:dd:50:05", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8cf63ba-ee", "ovs_interfaceid": "f8cf63ba-ee62-4a3a-85e0-87d88ff84665", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1367.425463] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Releasing lock "refresh_cache-7823099f-efdf-46bf-85d7-69e105dfb02c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1367.425734] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Updated the network info_cache for instance {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1367.425950] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1367.426492] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1367.426680] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1367.426806] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1367.430084] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1367.491729] env[61978]: DEBUG oslo_concurrency.lockutils [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.729s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1367.493937] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1367.494401] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.640s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1367.494465] env[61978]: DEBUG nova.objects.instance [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lazy-loading 'resources' on Instance uuid fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1367.527561] env[61978]: INFO nova.scheduler.client.report [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Deleted allocations for instance c27f7dd1-bdb0-450b-a58e-fe9afafdb368 [ 1367.761097] env[61978]: DEBUG nova.network.neutron [req-c6853a7d-753f-4f2f-9e18-15e3587224fb req-28b84d24-1bbd-473a-82e8-a24d7bf5934c service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Updated VIF entry in instance network info cache for port 2a16d335-2f9e-47f7-a83c-44777d05ca3b. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1367.761495] env[61978]: DEBUG nova.network.neutron [req-c6853a7d-753f-4f2f-9e18-15e3587224fb req-28b84d24-1bbd-473a-82e8-a24d7bf5934c service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Updating instance_info_cache with network_info: [{"id": "2a16d335-2f9e-47f7-a83c-44777d05ca3b", "address": "fa:16:3e:e2:18:65", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": null, "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap2a16d335-2f", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1367.882783] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "refresh_cache-ac1676dd-affa-49cd-9e7b-a301abcec232" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1367.930059] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1367.998762] env[61978]: DEBUG nova.objects.instance [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lazy-loading 'numa_topology' on Instance uuid fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1368.014036] env[61978]: DEBUG nova.network.neutron [-] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1368.038892] env[61978]: DEBUG oslo_concurrency.lockutils [None req-126a9405-c05c-43f3-838d-0486112e7d78 tempest-ServerShowV257Test-298901440 tempest-ServerShowV257Test-298901440-project-member] Lock "c27f7dd1-bdb0-450b-a58e-fe9afafdb368" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.437s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1368.263985] env[61978]: DEBUG oslo_concurrency.lockutils [req-c6853a7d-753f-4f2f-9e18-15e3587224fb req-28b84d24-1bbd-473a-82e8-a24d7bf5934c service nova] Releasing lock "refresh_cache-fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" {{(pid=61978) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1368.388306] env[61978]: DEBUG nova.compute.manager [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=61978) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:897}} [ 1368.388600] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1368.501307] env[61978]: DEBUG nova.objects.base [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1368.516283] env[61978]: INFO nova.compute.manager [-] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Took 2.11 seconds to deallocate network for instance. [ 1368.590353] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4712c84-cafa-4cdd-a224-a91fd8ebb3b6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.598725] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-294151f4-a451-4eaf-aa59-da0428ea880e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.627909] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb1ebbf4-1754-42ca-baf0-f326cc9bff23 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.635106] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfbd362b-e1fb-481d-a208-3d4266ecfd61 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.648053] env[61978]: DEBUG nova.compute.provider_tree [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1368.691851] env[61978]: DEBUG nova.compute.manager [req-3d9a7479-1eda-4427-986d-344b9beb9a89 req-a55344d7-8041-4fc4-95ea-5bca1df9a012 service nova] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Received event network-vif-deleted-7cefaef7-7dfd-4081-8872-bbdb8d201973 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1369.024649] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1369.151099] env[61978]: DEBUG nova.scheduler.client.report [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1369.656131] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.162s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.658689] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.729s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1369.658857] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.659080] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1369.659318] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 1.271s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1369.661147] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ddddac7-f33e-4a65-9642-221312f39133 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.670391] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2914f3a-dfac-4097-9ab5-8756a8b7eca3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.684847] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75dc6040-636c-470a-b66f-05c8bfb48b40 {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.691660] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ded0584-08de-430c-8536-4286dc3cb5f0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.720544] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180714MB free_disk=185GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1369.720688] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.167164] env[61978]: DEBUG nova.objects.instance [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lazy-loading 'migration_context' on Instance uuid ac1676dd-affa-49cd-9e7b-a301abcec232 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1370.170443] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d6599c76-fff3-438f-b18b-510b7e7f5444 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 21.616s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1370.171452] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 2.678s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.171642] env[61978]: INFO nova.compute.manager [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Unshelving [ 1370.765698] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd5c5400-434a-46a7-89b3-7156b4d7a7bf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.773423] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de303b2f-d12c-4c9b-9daf-94430a1a0bc4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.803825] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d982d7c-54ca-46d0-a973-3bddd9076127 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.812401] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6ef42dac-8852-4d1c-8951-cee0bdef0a2b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.825906] env[61978]: DEBUG nova.compute.provider_tree [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1371.193299] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1371.329035] env[61978]: DEBUG nova.scheduler.client.report [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1371.631683] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Acquiring lock "ce71756d-7a11-46d4-a5dd-a5b720df83c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1371.631923] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Lock "ce71756d-7a11-46d4-a5dd-a5b720df83c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1372.137923] env[61978]: DEBUG nova.compute.manager [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Starting instance... 
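The "Inventory has not changed for provider ... based on inventory data" entries above carry the full inventory the compute node would report to placement: per resource class a total, a reserved amount, min/max unit, step size and allocation ratio. A sketch, not the actual report-client code, of the compare-before-update idea those messages reflect:

# Inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf, copied from
# the log entry above.
local_inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "min_unit": 1,
                  "max_unit": 16,    "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                  "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "min_unit": 1,
                  "max_unit": 185,   "step_size": 1, "allocation_ratio": 1.0},
}

def needs_placement_update(cached_inventory, local_inventory):
    # Only push to placement when something differs; otherwise log
    # "Inventory has not changed" and skip the API call.
    return cached_inventory != local_inventory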
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1372.341139] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.682s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1372.346663] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.322s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1372.346886] env[61978]: DEBUG nova.objects.instance [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lazy-loading 'resources' on Instance uuid b4541d84-b4c3-4441-b5a7-90de2dac3562 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1372.657706] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1372.948877] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b659eff-1aea-4341-9045-9e7ca2193360 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.956134] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7eb2a1f-f965-4c66-8aed-fc0d05fa9445 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.986615] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c5377f-4607-453d-80b0-a19768c3bf29 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.994260] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-326cc976-41bd-4d6f-a3a9-3d2c50fe04aa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.007660] env[61978]: DEBUG nova.compute.provider_tree [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1373.281541] env[61978]: INFO nova.compute.manager [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Rebuilding instance [ 1373.333028] env[61978]: DEBUG nova.compute.manager [None 
req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1373.333772] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7fe4f73-00d4-44b3-89ca-60afd986fea0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.511229] env[61978]: DEBUG nova.scheduler.client.report [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1373.844925] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1373.845248] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a718bf73-ce05-4c87-a63d-dfea9dcd717d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.852743] env[61978]: DEBUG oslo_vmware.api [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Waiting for the task: (returnval){ [ 1373.852743] env[61978]: value = "task-1396033" [ 1373.852743] env[61978]: _type = "Task" [ 1373.852743] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.860311] env[61978]: DEBUG oslo_vmware.api [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1396033, 'name': PowerOffVM_Task} progress is 0%. 
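The "Waiting for the task: ... task-1396033 ... to complete", "progress is 0%" and later "completed successfully" entries are the client-side polling loop around vCenter tasks. A simplified sketch of that loop, assuming a hypothetical get_task_info(task_ref) callable returning an object with state, progress and error attributes (the real code is oslo.vmware's wait_for_task/_poll_task):

import time

def wait_for_task(task_ref, get_task_info, interval=0.5):
    # Poll until vCenter reports a terminal state, mirroring the
    # "progress is N%" entries followed by "completed successfully".
    while True:
        info = get_task_info(task_ref)      # hypothetical helper
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"task {task_ref} failed: {info.error}")
        time.sleep(interval)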
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.876460] env[61978]: INFO nova.compute.manager [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Swapping old allocation on dict_keys(['44209228-3464-48ae-bc40-83eccd44b0cf']) held by migration 5150d922-c55e-4cf5-aa83-26e59b3838ba for instance [ 1373.903016] env[61978]: DEBUG nova.scheduler.client.report [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Overwriting current allocation {'allocations': {'44209228-3464-48ae-bc40-83eccd44b0cf': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 155}}, 'project_id': '86ad52b551104a2594f1dbbc287f9efa', 'user_id': '4a208cee3d9c4efb8240ad943b55e915', 'consumer_generation': 1} on consumer ac1676dd-affa-49cd-9e7b-a301abcec232 {{(pid=61978) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1373.986111] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "refresh_cache-ac1676dd-affa-49cd-9e7b-a301abcec232" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1373.986378] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "refresh_cache-ac1676dd-affa-49cd-9e7b-a301abcec232" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1373.986648] env[61978]: DEBUG nova.network.neutron [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1374.016296] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.669s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.018667] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 4.298s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.034799] env[61978]: INFO nova.scheduler.client.report [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Deleted allocations for instance b4541d84-b4c3-4441-b5a7-90de2dac3562 [ 1374.362697] env[61978]: DEBUG oslo_vmware.api [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1396033, 'name': 
PowerOffVM_Task, 'duration_secs': 0.185052} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.363025] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1374.363916] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1374.364291] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-29c94dd6-da4c-4a97-83b8-a599280b5b33 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.370455] env[61978]: DEBUG oslo_vmware.api [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Waiting for the task: (returnval){ [ 1374.370455] env[61978]: value = "task-1396034" [ 1374.370455] env[61978]: _type = "Task" [ 1374.370455] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.379162] env[61978]: DEBUG oslo_vmware.api [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1396034, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.543820] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9cef7fab-69d2-4134-a0ad-57e4518f711b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "b4541d84-b4c3-4441-b5a7-90de2dac3562" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.786s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.703015] env[61978]: DEBUG nova.network.neutron [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Updating instance_info_cache with network_info: [{"id": "f8cf63ba-ee62-4a3a-85e0-87d88ff84665", "address": "fa:16:3e:dd:50:05", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8cf63ba-ee", "ovs_interfaceid": "f8cf63ba-ee62-4a3a-85e0-87d88ff84665", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1374.828980] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "7823099f-efdf-46bf-85d7-69e105dfb02c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1374.829292] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "7823099f-efdf-46bf-85d7-69e105dfb02c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.829515] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "7823099f-efdf-46bf-85d7-69e105dfb02c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1374.829704] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "7823099f-efdf-46bf-85d7-69e105dfb02c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.829879] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "7823099f-efdf-46bf-85d7-69e105dfb02c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.831941] env[61978]: INFO nova.compute.manager [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Terminating instance [ 1374.833620] env[61978]: DEBUG nova.compute.manager [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1374.833823] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1374.834658] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2246954c-f5fb-462f-a545-c990add1e112 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.842518] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1374.842734] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac67e374-386c-4db3-9f0f-51139af60830 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.848797] env[61978]: DEBUG oslo_vmware.api [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1374.848797] env[61978]: value = "task-1396035" [ 1374.848797] env[61978]: _type = "Task" [ 1374.848797] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.856627] env[61978]: DEBUG oslo_vmware.api [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396035, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.879914] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] VM already powered off {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1374.880182] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Volume detach. Driver type: vmdk {{(pid=61978) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1374.880403] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296041', 'volume_id': '929bd504-a0b6-42c2-88ae-ee98db6decf8', 'name': 'volume-929bd504-a0b6-42c2-88ae-ee98db6decf8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'db960922-12b5-41e7-9de3-312136819bb0', 'attached_at': '', 'detached_at': '', 'volume_id': '929bd504-a0b6-42c2-88ae-ee98db6decf8', 'serial': '929bd504-a0b6-42c2-88ae-ee98db6decf8'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1374.881208] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa744a2e-6fa1-40be-b426-661e36b81729 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.899015] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26e52de-7ff7-4b6d-aab5-55b353b80db4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.905612] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b38f9ac1-742c-4cdf-baee-27563621c9c7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.923264] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69af15b2-bd0b-44f5-8e04-820e427e4aaa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.939169] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] The volume has not been displaced from its original location: [datastore2] 
volume-929bd504-a0b6-42c2-88ae-ee98db6decf8/volume-929bd504-a0b6-42c2-88ae-ee98db6decf8.vmdk. No consolidation needed. {{(pid=61978) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1374.944585] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Reconfiguring VM instance instance-0000006a to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1374.944920] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-664a874b-eaeb-44f2-8ee6-29b9a5d10e8c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.962684] env[61978]: DEBUG oslo_vmware.api [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Waiting for the task: (returnval){ [ 1374.962684] env[61978]: value = "task-1396036" [ 1374.962684] env[61978]: _type = "Task" [ 1374.962684] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.972553] env[61978]: DEBUG oslo_vmware.api [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1396036, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.044945] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 7823099f-efdf-46bf-85d7-69e105dfb02c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1375.045158] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance db960922-12b5-41e7-9de3-312136819bb0 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
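The _detach_volume_vmdk entries above show the Cinder connection_info for volume 929bd504-a0b6-42c2-88ae-ee98db6decf8 attached to db960922-12b5-41e7-9de3-312136819bb0: a vmdk-type attachment whose backing lives on the shadow VM vm-296041. Because the vmdk has not been displaced there is nothing to consolidate, so the detach reduces to a ReconfigVM_Task that removes the disk device ("detach disk 2000") while leaving the backing file in place. The relevant fields, restated as a Python dict for readability (abridged from the log):

connection_info = {
    "driver_volume_type": "vmdk",
    "data": {
        "volume": "vm-296041",   # shadow VM owning the backing vmdk
        "volume_id": "929bd504-a0b6-42c2-88ae-ee98db6decf8",
        "access_mode": "rw",
        "encrypted": False,
    },
    "serial": "929bd504-a0b6-42c2-88ae-ee98db6decf8",
}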
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1375.205676] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "refresh_cache-ac1676dd-affa-49cd-9e7b-a301abcec232" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1375.206185] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1375.206470] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d48e5176-407f-4b02-be4e-a18473ab077c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.213734] env[61978]: DEBUG oslo_vmware.api [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1375.213734] env[61978]: value = "task-1396037" [ 1375.213734] env[61978]: _type = "Task" [ 1375.213734] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.222039] env[61978]: DEBUG oslo_vmware.api [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396037, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.359094] env[61978]: DEBUG oslo_vmware.api [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396035, 'name': PowerOffVM_Task, 'duration_secs': 0.174354} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.359387] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1375.359566] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1375.359825] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3f34c4b1-9f24-4113-8dc8-68d4ceece743 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.423156] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1375.423548] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1375.423593] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Deleting the datastore file [datastore2] 7823099f-efdf-46bf-85d7-69e105dfb02c {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1375.423883] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5915be6-f110-4de2-aab8-e9a756557746 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.430579] env[61978]: DEBUG oslo_vmware.api [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for the task: (returnval){ [ 1375.430579] env[61978]: value = "task-1396039" [ 1375.430579] env[61978]: _type = "Task" [ 1375.430579] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.437891] env[61978]: DEBUG oslo_vmware.api [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396039, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.471413] env[61978]: DEBUG oslo_vmware.api [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1396036, 'name': ReconfigVM_Task, 'duration_secs': 0.148447} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.471694] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Reconfigured VM instance instance-0000006a to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1375.476060] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45f35675-2d7e-4ad1-a607-f98b3a0c6ac2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.496301] env[61978]: DEBUG oslo_vmware.api [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Waiting for the task: (returnval){ [ 1375.496301] env[61978]: value = "task-1396040" [ 1375.496301] env[61978]: _type = "Task" [ 1375.496301] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.504017] env[61978]: DEBUG oslo_vmware.api [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1396040, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.548646] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.723617] env[61978]: DEBUG oslo_vmware.api [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396037, 'name': PowerOffVM_Task, 'duration_secs': 0.172658} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.723856] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1375.724512] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1375.724733] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1375.724894] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1375.725094] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1375.725248] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1375.725435] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1375.725651] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1375.725813] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 
tempest-ServerActionsTestJSON-798658854-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1375.725981] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1375.726162] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1375.726340] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1375.731119] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b3c8878-2232-4745-93ca-27d09ab2968b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.745789] env[61978]: DEBUG oslo_vmware.api [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1375.745789] env[61978]: value = "task-1396041" [ 1375.745789] env[61978]: _type = "Task" [ 1375.745789] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.753151] env[61978]: DEBUG oslo_vmware.api [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396041, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.940359] env[61978]: DEBUG oslo_vmware.api [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Task: {'id': task-1396039, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106634} completed successfully. 
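The nova.virt.hardware entries above show topology negotiation for the m1.nano flavor (vcpus=1): with no flavor or image preferences the limits default to 65536 sockets/cores/threads, and the only combination whose product equals one vCPU is 1x1x1, hence "Got 1 possible topologies". A small sketch of that enumeration (illustrative; the real logic lives in nova/virt/hardware.py):

from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
    # Keep every (sockets, cores, threads) combination whose product equals
    # the flavor's vCPU count and stays within the limits.
    found = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    found.append(VirtCPUTopology(s, c, t))
    return found

# possible_cpu_topologies(1) -> [VirtCPUTopology(sockets=1, cores=1, threads=1)],
# matching the single topology reported in the log.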
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.940635] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1375.940830] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1375.941028] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1375.941218] env[61978]: INFO nova.compute.manager [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1375.941456] env[61978]: DEBUG oslo.service.loopingcall [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1375.941653] env[61978]: DEBUG nova.compute.manager [-] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1375.941750] env[61978]: DEBUG nova.network.neutron [-] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1376.005429] env[61978]: DEBUG oslo_vmware.api [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1396040, 'name': ReconfigVM_Task, 'duration_secs': 0.125082} completed successfully. 
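The teardown of 7823099f-efdf-46bf-85d7-69e105dfb02c above completes in a fixed order: power off the VM, unregister it, delete its directory from the datastore, then deallocate its Neutron ports. A sketch of that ordering with hypothetical callables standing in for the vmwareapi and Neutron calls (not the actual vmops code):

def destroy_instance(power_off, unregister, delete_datastore_files,
                     deallocate_network):
    # Order observed in the log for this instance:
    power_off()               # VirtualMachine.PowerOffVM_Task
    unregister()              # VirtualMachine.UnregisterVM
    delete_datastore_files()  # FileManager.DeleteDatastoreFile_Task
    deallocate_network()      # deallocate_for_instance() via Neutron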
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.005760] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296041', 'volume_id': '929bd504-a0b6-42c2-88ae-ee98db6decf8', 'name': 'volume-929bd504-a0b6-42c2-88ae-ee98db6decf8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'db960922-12b5-41e7-9de3-312136819bb0', 'attached_at': '', 'detached_at': '', 'volume_id': '929bd504-a0b6-42c2-88ae-ee98db6decf8', 'serial': '929bd504-a0b6-42c2-88ae-ee98db6decf8'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1376.006043] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1376.006779] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78221768-bfe8-4720-aeb1-bada4c0bc219 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.013293] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1376.013514] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a8c219a-3c96-4dfc-aff7-aa53cf4ddcaa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.051898] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance ce71756d-7a11-46d4-a5dd-a5b720df83c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1376.052093] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance ac1676dd-affa-49cd-9e7b-a301abcec232 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1376.052297] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1376.052472] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1376.073243] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1376.073608] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1376.073894] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Deleting the datastore file [datastore2] db960922-12b5-41e7-9de3-312136819bb0 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1376.074193] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8dd5f0c-76bc-43b2-993c-8e1d325da6cc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.081624] env[61978]: DEBUG oslo_vmware.api [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Waiting for the task: (returnval){ [ 1376.081624] env[61978]: value = "task-1396043" [ 1376.081624] env[61978]: _type = "Task" [ 1376.081624] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.091308] env[61978]: DEBUG oslo_vmware.api [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1396043, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.127765] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf3ac8a-b15a-4986-af89-f1be8b73e129 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.137187] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c25e5515-cc44-440c-a4c2-63b5095c330c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.188254] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19ec598-da30-436a-98f8-f2ff8af78b7b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.199088] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-204cd777-88d7-42e0-9e8f-de9335572536 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.212732] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1376.219318] env[61978]: DEBUG nova.compute.manager [req-d02b70bd-40f4-424c-bf10-a88501e12263 req-913820a8-99ad-4db0-951f-b4fd5dfab59b service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Received event network-vif-deleted-377707c6-c569-41b4-b460-d4ffd83a8c03 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1376.219551] env[61978]: INFO nova.compute.manager [req-d02b70bd-40f4-424c-bf10-a88501e12263 req-913820a8-99ad-4db0-951f-b4fd5dfab59b service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Neutron deleted interface 377707c6-c569-41b4-b460-d4ffd83a8c03; detaching it from the instance and deleting it from the info cache [ 1376.219747] env[61978]: DEBUG nova.network.neutron [req-d02b70bd-40f4-424c-bf10-a88501e12263 req-913820a8-99ad-4db0-951f-b4fd5dfab59b service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1376.255330] env[61978]: DEBUG oslo_vmware.api [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396041, 'name': ReconfigVM_Task, 'duration_secs': 0.130758} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.256139] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818afbae-d660-4f0d-a253-125b77ad5077 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.274158] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1376.274419] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1376.274586] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1376.274772] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1376.274924] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1376.275094] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1376.275323] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1376.275512] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1376.275687] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1376.275853] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1376.276041] env[61978]: DEBUG nova.virt.hardware [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1376.276782] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33cac67a-a00e-44e8-b53d-8007e2d8150f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.282273] env[61978]: DEBUG oslo_vmware.api [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1376.282273] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a61eb9-1185-4bea-36ad-6771f85d1744" [ 1376.282273] env[61978]: _type = "Task" [ 1376.282273] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.290152] env[61978]: DEBUG oslo_vmware.api [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a61eb9-1185-4bea-36ad-6771f85d1744, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.594016] env[61978]: DEBUG oslo_vmware.api [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Task: {'id': task-1396043, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076468} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.594297] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1376.594487] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1376.594667] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1376.648075] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Volume detach. Driver type: vmdk {{(pid=61978) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1376.648428] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-63ce959f-329e-4438-996e-9eebb0b5b64e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.657552] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5faa668-a725-4d8e-8d75-3378b2774cb5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.683356] env[61978]: ERROR nova.compute.manager [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Failed to detach volume 929bd504-a0b6-42c2-88ae-ee98db6decf8 from /dev/sda: nova.exception.InstanceNotFound: Instance db960922-12b5-41e7-9de3-312136819bb0 could not be found. 
[ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] Traceback (most recent call last): [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/compute/manager.py", line 4142, in _do_rebuild_instance [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] self.driver.rebuild(**kwargs) [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] raise NotImplementedError() [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] NotImplementedError [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] During handling of the above exception, another exception occurred: [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] Traceback (most recent call last): [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/compute/manager.py", line 3565, in _detach_root_volume [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] self.driver.detach_volume(context, old_connection_info, [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 559, in detach_volume [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] return self._volumeops.detach_volume(connection_info, instance) [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] self._detach_volume_vmdk(connection_info, instance) [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] stable_ref.fetch_moref(session) [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] 
nova.exception.InstanceNotFound: Instance db960922-12b5-41e7-9de3-312136819bb0 could not be found. [ 1376.683356] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] [ 1376.703794] env[61978]: DEBUG nova.network.neutron [-] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1376.715715] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1376.721928] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e20b4ed2-5138-4c04-8800-b2ef68109385 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.731176] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-583216d2-ed6b-42ad-a92e-2d0a01d5b7fd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.755423] env[61978]: DEBUG nova.compute.manager [req-d02b70bd-40f4-424c-bf10-a88501e12263 req-913820a8-99ad-4db0-951f-b4fd5dfab59b service nova] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Detach interface failed, port_id=377707c6-c569-41b4-b460-d4ffd83a8c03, reason: Instance 7823099f-efdf-46bf-85d7-69e105dfb02c could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1376.791568] env[61978]: DEBUG oslo_vmware.api [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a61eb9-1185-4bea-36ad-6771f85d1744, 'name': SearchDatastore_Task, 'duration_secs': 0.008986} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.796880] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Reconfiguring VM instance instance-00000067 to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1376.797212] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14c04e0a-2ccd-4182-8b5f-8d76fd926232 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.815329] env[61978]: DEBUG oslo_vmware.api [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1376.815329] env[61978]: value = "task-1396044" [ 1376.815329] env[61978]: _type = "Task" [ 1376.815329] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.819740] env[61978]: DEBUG nova.compute.utils [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Build of instance db960922-12b5-41e7-9de3-312136819bb0 aborted: Failed to rebuild volume backed instance. {{(pid=61978) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1376.825364] env[61978]: DEBUG oslo_vmware.api [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396044, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.826269] env[61978]: ERROR nova.compute.manager [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance db960922-12b5-41e7-9de3-312136819bb0 aborted: Failed to rebuild volume backed instance. 
[ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] Traceback (most recent call last): [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/compute/manager.py", line 4142, in _do_rebuild_instance [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] self.driver.rebuild(**kwargs) [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] raise NotImplementedError() [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] NotImplementedError [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] During handling of the above exception, another exception occurred: [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] Traceback (most recent call last): [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/compute/manager.py", line 3600, in _rebuild_volume_backed_instance [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] self._detach_root_volume(context, instance, root_bdm) [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/compute/manager.py", line 3579, in _detach_root_volume [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] with excutils.save_and_reraise_exception(): [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] self.force_reraise() [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] raise self.value [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/compute/manager.py", line 3565, in _detach_root_volume [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] self.driver.detach_volume(context, old_connection_info, [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 559, in detach_volume [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] return self._volumeops.detach_volume(connection_info, instance) [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] self._detach_volume_vmdk(connection_info, instance) [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] stable_ref.fetch_moref(session) [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] nova.exception.InstanceNotFound: Instance db960922-12b5-41e7-9de3-312136819bb0 could not be found. [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] During handling of the above exception, another exception occurred: [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] Traceback (most recent call last): [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/compute/manager.py", line 10865, in _error_out_instance_on_exception [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] yield [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/compute/manager.py", line 3868, in rebuild_instance [ 1376.826269] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] self._do_rebuild_instance_with_claim( [ 1376.827278] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/compute/manager.py", line 3954, in _do_rebuild_instance_with_claim [ 1376.827278] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] self._do_rebuild_instance( [ 1376.827278] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/compute/manager.py", line 4146, in _do_rebuild_instance [ 1376.827278] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] self._rebuild_default_impl(**kwargs) [ 1376.827278] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/compute/manager.py", line 3723, in _rebuild_default_impl [ 1376.827278] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] 
self._rebuild_volume_backed_instance( [ 1376.827278] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] File "/opt/stack/nova/nova/compute/manager.py", line 3615, in _rebuild_volume_backed_instance [ 1376.827278] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] raise exception.BuildAbortException( [ 1376.827278] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] nova.exception.BuildAbortException: Build of instance db960922-12b5-41e7-9de3-312136819bb0 aborted: Failed to rebuild volume backed instance. [ 1376.827278] env[61978]: ERROR nova.compute.manager [instance: db960922-12b5-41e7-9de3-312136819bb0] [ 1377.207414] env[61978]: INFO nova.compute.manager [-] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Took 1.27 seconds to deallocate network for instance. [ 1377.220729] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1377.220922] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.202s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.221230] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.028s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.221459] env[61978]: DEBUG nova.objects.instance [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lazy-loading 'pci_requests' on Instance uuid fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1377.222375] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1377.222522] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Cleaning up deleted instances {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1377.328802] env[61978]: DEBUG oslo_vmware.api [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396044, 'name': ReconfigVM_Task, 'duration_secs': 0.17874} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.332297] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Reconfigured VM instance instance-00000067 to detach disk 2000 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1377.333660] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8e55fa-6f7c-43a4-bb03-1831b645b0e0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.368994] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] ac1676dd-affa-49cd-9e7b-a301abcec232/ac1676dd-affa-49cd-9e7b-a301abcec232.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1377.373969] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-293f8396-2dca-43f3-a82a-cfbc52b19d14 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.394631] env[61978]: DEBUG oslo_vmware.api [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1377.394631] env[61978]: value = "task-1396045" [ 1377.394631] env[61978]: _type = "Task" [ 1377.394631] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.402320] env[61978]: DEBUG oslo_vmware.api [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396045, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.713598] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.725611] env[61978]: DEBUG nova.objects.instance [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lazy-loading 'numa_topology' on Instance uuid fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1377.738178] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] There are 57 instances to clean {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 1377.738349] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: c27f7dd1-bdb0-450b-a58e-fe9afafdb368] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1377.740706] env[61978]: INFO nova.compute.claims [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1377.905056] env[61978]: DEBUG oslo_vmware.api [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396045, 'name': ReconfigVM_Task, 'duration_secs': 0.241421} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.905353] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Reconfigured VM instance instance-00000067 to attach disk [datastore2] ac1676dd-affa-49cd-9e7b-a301abcec232/ac1676dd-affa-49cd-9e7b-a301abcec232.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1377.906172] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33996eb-e42b-46d1-bb8f-08b74893a338 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.923990] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e6b07d-ef90-4fb2-9a9b-1aeda9e8b1aa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.940537] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad5c88ff-4198-4676-980c-fc9dd79d31a2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.956875] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ca51da-aa54-4126-a2b2-62a5959f0306 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.962751] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1377.962969] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d77028e8-6056-4086-b504-5e13d32fd736 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.968681] env[61978]: DEBUG oslo_vmware.api [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1377.968681] env[61978]: value = "task-1396046" [ 1377.968681] env[61978]: _type = "Task" [ 1377.968681] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.975387] env[61978]: DEBUG oslo_vmware.api [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396046, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.246548] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 001d90e9-9c22-4044-b550-d3acd778222e] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1378.477868] env[61978]: DEBUG oslo_vmware.api [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396046, 'name': PowerOnVM_Task, 'duration_secs': 0.353549} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.478161] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1378.751540] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: a5b3f628-edc6-4d30-a179-ffc755f940f7] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1378.813886] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c1e2b53-e4d2-4459-9ed2-65ff7f6fcd81 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.821534] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da8fe377-22fd-4062-b5b0-a7734d084855 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.850727] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1378.851888] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd85f24-d457-41f0-8602-2dd1b59df976 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.858900] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-461470a2-72da-42a1-a61a-97d6a53cb7a8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.871386] env[61978]: DEBUG nova.compute.provider_tree [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1379.167316] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Acquiring lock 
"db960922-12b5-41e7-9de3-312136819bb0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.167645] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Lock "db960922-12b5-41e7-9de3-312136819bb0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.167882] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Acquiring lock "db960922-12b5-41e7-9de3-312136819bb0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.168090] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Lock "db960922-12b5-41e7-9de3-312136819bb0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.168271] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Lock "db960922-12b5-41e7-9de3-312136819bb0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.170396] env[61978]: INFO nova.compute.manager [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Terminating instance [ 1379.173930] env[61978]: DEBUG nova.compute.manager [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1379.174232] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ab1e26ff-5f5a-46a1-bb7b-c4802c028a41 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.183838] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f741dd-8c4e-4ac3-92a3-6292f3e4dd5a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.875009] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: b4541d84-b4c3-4441-b5a7-90de2dac3562] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1379.881021] env[61978]: DEBUG nova.scheduler.client.report [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1379.881316] env[61978]: INFO nova.compute.manager [None req-e340bedf-72ad-475c-a72c-2e57f3a706fe tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Updating instance to original state: 'active' [ 1379.898682] env[61978]: WARNING nova.virt.vmwareapi.driver [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance db960922-12b5-41e7-9de3-312136819bb0 could not be found. 
[ 1379.898800] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1379.899657] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a5551ba5-82e3-4125-bf16-8540d64add7e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.910033] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93765420-606f-446d-9b9e-05a0b8726697 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.934535] env[61978]: WARNING nova.virt.vmwareapi.vmops [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance db960922-12b5-41e7-9de3-312136819bb0 could not be found. [ 1379.934836] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1379.935097] env[61978]: INFO nova.compute.manager [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Took 0.76 seconds to destroy the instance on the hypervisor. [ 1379.935428] env[61978]: DEBUG oslo.service.loopingcall [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1379.935784] env[61978]: DEBUG nova.compute.manager [-] [instance: db960922-12b5-41e7-9de3-312136819bb0] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1379.935926] env[61978]: DEBUG nova.network.neutron [-] [instance: db960922-12b5-41e7-9de3-312136819bb0] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1380.387747] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 48d05337-7018-4dc2-a6a4-dd80ad3c4eb1] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1380.389845] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.169s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1380.399399] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.741s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.400766] env[61978]: INFO nova.compute.claims [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1380.404673] env[61978]: DEBUG nova.compute.manager [req-5b628aa3-b3a8-4b7c-a569-1ed43722cc74 req-120079e8-5b2a-4e9f-ad35-7431b2b8666e service nova] [instance: db960922-12b5-41e7-9de3-312136819bb0] Received event network-vif-deleted-e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1380.404859] env[61978]: INFO nova.compute.manager [req-5b628aa3-b3a8-4b7c-a569-1ed43722cc74 req-120079e8-5b2a-4e9f-ad35-7431b2b8666e service nova] [instance: db960922-12b5-41e7-9de3-312136819bb0] Neutron deleted interface e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b; detaching it from the instance and deleting it from the info cache [ 1380.405043] env[61978]: DEBUG nova.network.neutron [req-5b628aa3-b3a8-4b7c-a569-1ed43722cc74 req-120079e8-5b2a-4e9f-ad35-7431b2b8666e service nova] [instance: db960922-12b5-41e7-9de3-312136819bb0] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1380.446495] env[61978]: INFO nova.network.neutron [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Updating port 2a16d335-2f9e-47f7-a83c-44777d05ca3b with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1380.867547] env[61978]: DEBUG nova.network.neutron [-] [instance: db960922-12b5-41e7-9de3-312136819bb0] Updating instance_info_cache 
with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1380.896177] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 764fdf3c-a6ce-4cd6-9190-d2d43fded0fa] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1380.911077] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81ff9ddd-1a12-49b9-be7b-51dbc9b2437e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.924081] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a9d9e13-1654-45e2-b187-c2781d140e78 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.950516] env[61978]: DEBUG nova.compute.manager [req-5b628aa3-b3a8-4b7c-a569-1ed43722cc74 req-120079e8-5b2a-4e9f-ad35-7431b2b8666e service nova] [instance: db960922-12b5-41e7-9de3-312136819bb0] Detach interface failed, port_id=e15a01e2-5ddc-4a47-b0a3-e1013c6d8b0b, reason: Instance db960922-12b5-41e7-9de3-312136819bb0 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1381.188248] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "ac1676dd-affa-49cd-9e7b-a301abcec232" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.188972] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "ac1676dd-affa-49cd-9e7b-a301abcec232" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.188972] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "ac1676dd-affa-49cd-9e7b-a301abcec232-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.188972] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "ac1676dd-affa-49cd-9e7b-a301abcec232-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.189160] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "ac1676dd-affa-49cd-9e7b-a301abcec232-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
:: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.191339] env[61978]: INFO nova.compute.manager [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Terminating instance [ 1381.195017] env[61978]: DEBUG nova.compute.manager [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1381.195017] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1381.195017] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf29990-264b-4b0a-a524-0c6185ffff92 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.201746] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1381.201973] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-55b43c20-b923-48ea-8963-a55f8576df20 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.208244] env[61978]: DEBUG oslo_vmware.api [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1381.208244] env[61978]: value = "task-1396047" [ 1381.208244] env[61978]: _type = "Task" [ 1381.208244] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.217964] env[61978]: DEBUG oslo_vmware.api [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396047, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.370555] env[61978]: INFO nova.compute.manager [-] [instance: db960922-12b5-41e7-9de3-312136819bb0] Took 1.43 seconds to deallocate network for instance. 
[ 1381.399579] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: c59fbfc9-4b5c-4a0e-8bbf-44b216d5f8b9] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1381.479082] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f9d08c-b08a-4df6-9939-1485cf0c1bb8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.487396] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4530dd7-747d-4506-815f-79153e08c4e5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.516440] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e08404cd-37c1-48ab-8652-be84c2fcaf12 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.523793] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4956fe-b20f-4e59-b0f2-a1726ea7db2c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.538263] env[61978]: DEBUG nova.compute.provider_tree [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1381.718231] env[61978]: DEBUG oslo_vmware.api [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396047, 'name': PowerOffVM_Task, 'duration_secs': 0.264909} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.718515] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1381.718686] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1381.718931] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d7182c36-4afa-48c4-9aec-85a77e014606 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.779607] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1381.779989] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1381.780263] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Deleting the datastore file [datastore2] ac1676dd-affa-49cd-9e7b-a301abcec232 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1381.780538] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5d007a6-2266-4efb-b7bd-f48ee6f17313 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.786455] env[61978]: DEBUG oslo_vmware.api [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1381.786455] env[61978]: value = "task-1396049" [ 1381.786455] env[61978]: _type = "Task" [ 1381.786455] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.794778] env[61978]: DEBUG oslo_vmware.api [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396049, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.905280] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 845ec88d-5d2b-479c-a2d1-fa235b2b87b3] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1381.922459] env[61978]: INFO nova.compute.manager [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Took 0.55 seconds to detach 1 volumes for instance. [ 1381.923227] env[61978]: DEBUG nova.compute.manager [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Deleting volume: 929bd504-a0b6-42c2-88ae-ee98db6decf8 {{(pid=61978) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3247}} [ 1382.041584] env[61978]: DEBUG nova.scheduler.client.report [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1382.130978] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "refresh_cache-fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.131224] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired lock "refresh_cache-fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.131413] env[61978]: DEBUG nova.network.neutron [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1382.296800] env[61978]: DEBUG oslo_vmware.api [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396049, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148259} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.297093] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1382.297298] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1382.297617] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1382.297817] env[61978]: INFO nova.compute.manager [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1382.298080] env[61978]: DEBUG oslo.service.loopingcall [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1382.298289] env[61978]: DEBUG nova.compute.manager [-] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1382.298386] env[61978]: DEBUG nova.network.neutron [-] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1382.407242] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: de8abe58-e0c2-4eaf-b3a6-7106e0861080] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1382.422715] env[61978]: DEBUG nova.compute.manager [req-ae862098-76a7-45bb-a426-d67a2d8ccb91 req-70f852ea-2412-4b19-934e-b7ee2760481d service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Received event network-vif-plugged-2a16d335-2f9e-47f7-a83c-44777d05ca3b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1382.422945] env[61978]: DEBUG oslo_concurrency.lockutils [req-ae862098-76a7-45bb-a426-d67a2d8ccb91 req-70f852ea-2412-4b19-934e-b7ee2760481d service nova] Acquiring lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.423180] env[61978]: DEBUG oslo_concurrency.lockutils [req-ae862098-76a7-45bb-a426-d67a2d8ccb91 req-70f852ea-2412-4b19-934e-b7ee2760481d service nova] Lock 
"fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1382.423359] env[61978]: DEBUG oslo_concurrency.lockutils [req-ae862098-76a7-45bb-a426-d67a2d8ccb91 req-70f852ea-2412-4b19-934e-b7ee2760481d service nova] Lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1382.423532] env[61978]: DEBUG nova.compute.manager [req-ae862098-76a7-45bb-a426-d67a2d8ccb91 req-70f852ea-2412-4b19-934e-b7ee2760481d service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] No waiting events found dispatching network-vif-plugged-2a16d335-2f9e-47f7-a83c-44777d05ca3b {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1382.423702] env[61978]: WARNING nova.compute.manager [req-ae862098-76a7-45bb-a426-d67a2d8ccb91 req-70f852ea-2412-4b19-934e-b7ee2760481d service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Received unexpected event network-vif-plugged-2a16d335-2f9e-47f7-a83c-44777d05ca3b for instance with vm_state shelved_offloaded and task_state spawning. [ 1382.423874] env[61978]: DEBUG nova.compute.manager [req-ae862098-76a7-45bb-a426-d67a2d8ccb91 req-70f852ea-2412-4b19-934e-b7ee2760481d service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Received event network-changed-2a16d335-2f9e-47f7-a83c-44777d05ca3b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1382.424235] env[61978]: DEBUG nova.compute.manager [req-ae862098-76a7-45bb-a426-d67a2d8ccb91 req-70f852ea-2412-4b19-934e-b7ee2760481d service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Refreshing instance network info cache due to event network-changed-2a16d335-2f9e-47f7-a83c-44777d05ca3b. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1382.424475] env[61978]: DEBUG oslo_concurrency.lockutils [req-ae862098-76a7-45bb-a426-d67a2d8ccb91 req-70f852ea-2412-4b19-934e-b7ee2760481d service nova] Acquiring lock "refresh_cache-fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.494244] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.546515] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.148s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1382.547069] env[61978]: DEBUG nova.compute.manager [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1382.549657] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.836s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1382.549886] env[61978]: DEBUG nova.objects.instance [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lazy-loading 'resources' on Instance uuid 7823099f-efdf-46bf-85d7-69e105dfb02c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1382.910349] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: f9b57cf4-f2e4-4d2a-9bd4-74952d46876d] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1382.935893] env[61978]: DEBUG nova.network.neutron [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Updating instance_info_cache with network_info: [{"id": "2a16d335-2f9e-47f7-a83c-44777d05ca3b", "address": "fa:16:3e:e2:18:65", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a16d335-2f", "ovs_interfaceid": "2a16d335-2f9e-47f7-a83c-44777d05ca3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1383.053627] env[61978]: DEBUG nova.compute.utils [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1383.058015] env[61978]: DEBUG nova.compute.manager [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1383.058015] env[61978]: DEBUG nova.network.neutron [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1383.101229] env[61978]: DEBUG nova.policy [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df3feaef5eef4561bc49693709a5c2d9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1734dd57233346c2b3214f5424b55c64', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1383.131196] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a8868e-5438-4072-a7d2-6e8e5d63aac9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.140383] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-153ae1ed-ec32-455e-a30b-3cae573b4ace {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.178464] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324c3ea2-75d4-407a-b0b0-9ffdc1f299f2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.186759] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9431a998-94ac-4368-b527-1e4c6fdeb9fe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.199769] env[61978]: DEBUG nova.compute.provider_tree [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1383.341767] env[61978]: DEBUG nova.network.neutron [-] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1383.416789] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 03b08977-4b20-4bac-b48b-06ba5df4e579] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1383.439091] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lock "refresh_cache-fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.442235] env[61978]: DEBUG oslo_concurrency.lockutils [req-ae862098-76a7-45bb-a426-d67a2d8ccb91 req-70f852ea-2412-4b19-934e-b7ee2760481d service nova] Acquired lock "refresh_cache-fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.442235] env[61978]: DEBUG nova.network.neutron [req-ae862098-76a7-45bb-a426-d67a2d8ccb91 req-70f852ea-2412-4b19-934e-b7ee2760481d service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Refreshing network info cache for port 2a16d335-2f9e-47f7-a83c-44777d05ca3b {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1383.471988] env[61978]: DEBUG nova.virt.hardware [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9abf420057646ddb3cb03130daf2d46a',container_format='bare',created_at=2024-11-04T15:11:51Z,direct_url=,disk_format='vmdk',id=c3d6fb3f-8d33-4f48-a74c-692608eb4ac9,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1260247667-shelved',owner='2af733ffc4384fa1a2c59f4a45f1778c',properties=ImageMetaProps,protected=,size=31664640,status='active',tags=,updated_at=2024-11-04T15:12:04Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1383.472279] env[61978]: DEBUG nova.virt.hardware [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1383.472442] env[61978]: DEBUG nova.virt.hardware [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1383.472633] env[61978]: DEBUG nova.virt.hardware [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1383.472785] env[61978]: DEBUG nova.virt.hardware [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1383.472936] env[61978]: DEBUG nova.virt.hardware [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1383.473164] env[61978]: DEBUG nova.virt.hardware [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1383.473329] env[61978]: DEBUG nova.virt.hardware [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1383.473505] env[61978]: DEBUG nova.virt.hardware [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1383.473748] env[61978]: DEBUG nova.virt.hardware [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1383.473853] env[61978]: DEBUG nova.virt.hardware [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1383.475030] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-59b8d88f-4cc1-4ec8-8caa-8f115ba0e080 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.484312] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54ebf67-0b70-4c17-b0e9-b82e1a656890 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.507051] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:18:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac7039c0-3374-4c08-87fc-af2449b48b02', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2a16d335-2f9e-47f7-a83c-44777d05ca3b', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1383.514905] env[61978]: DEBUG oslo.service.loopingcall [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1383.515102] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1383.515319] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4936af3b-dd39-44a5-aa5a-dde41539c106 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.543031] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1383.543031] env[61978]: value = "task-1396051" [ 1383.543031] env[61978]: _type = "Task" [ 1383.543031] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.557432] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396051, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.559240] env[61978]: DEBUG nova.compute.manager [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1383.563396] env[61978]: DEBUG nova.network.neutron [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Successfully created port: 7e93119a-e6cd-401f-bb4a-c41a130cd596 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1383.726300] env[61978]: ERROR nova.scheduler.client.report [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] [req-81c5ee21-b2a9-4ad3-9b81-2718a4f70d3a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 44209228-3464-48ae-bc40-83eccd44b0cf. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-81c5ee21-b2a9-4ad3-9b81-2718a4f70d3a"}]} [ 1383.744437] env[61978]: DEBUG nova.scheduler.client.report [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Refreshing inventories for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1383.765571] env[61978]: DEBUG nova.scheduler.client.report [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Updating ProviderTree inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1383.765848] env[61978]: DEBUG nova.compute.provider_tree [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1383.779694] env[61978]: DEBUG nova.scheduler.client.report [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 
tempest-AttachInterfacesTestJSON-408106683-project-member] Refreshing aggregate associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, aggregates: None {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1383.800622] env[61978]: DEBUG nova.scheduler.client.report [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Refreshing trait associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1383.843013] env[61978]: INFO nova.compute.manager [-] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Took 1.54 seconds to deallocate network for instance. [ 1383.882752] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8538546-6d60-456b-a708-8a4f28f674e8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.891772] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1ccf04-ae62-45e5-bf4e-4b6622e7caa9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.920702] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 4d357d46-8bbb-4228-a5a6-2ce67fe037d7] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1383.924296] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ee0132-c634-4b82-b38e-c776852034f6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.931491] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c08460-d51a-43e6-a360-d4ebbe5ccc80 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.946608] env[61978]: DEBUG nova.compute.provider_tree [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1384.052370] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396051, 'name': CreateVM_Task, 'duration_secs': 0.3534} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.052552] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1384.053289] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c3d6fb3f-8d33-4f48-a74c-692608eb4ac9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1384.053534] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c3d6fb3f-8d33-4f48-a74c-692608eb4ac9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.055330] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c3d6fb3f-8d33-4f48-a74c-692608eb4ac9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1384.055330] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8789293d-6b2f-4dfd-8f6c-c9304cb5a84b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.059067] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1384.059067] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]525472c4-7611-ecd1-24c1-0887ce09765c" [ 1384.059067] env[61978]: _type = "Task" [ 1384.059067] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.070166] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]525472c4-7611-ecd1-24c1-0887ce09765c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.339166] env[61978]: DEBUG nova.network.neutron [req-ae862098-76a7-45bb-a426-d67a2d8ccb91 req-70f852ea-2412-4b19-934e-b7ee2760481d service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Updated VIF entry in instance network info cache for port 2a16d335-2f9e-47f7-a83c-44777d05ca3b. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1384.339561] env[61978]: DEBUG nova.network.neutron [req-ae862098-76a7-45bb-a426-d67a2d8ccb91 req-70f852ea-2412-4b19-934e-b7ee2760481d service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Updating instance_info_cache with network_info: [{"id": "2a16d335-2f9e-47f7-a83c-44777d05ca3b", "address": "fa:16:3e:e2:18:65", "network": {"id": "183195a1-ec97-4ecd-9e83-b89bb1337939", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-998439254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2af733ffc4384fa1a2c59f4a45f1778c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a16d335-2f", "ovs_interfaceid": "2a16d335-2f9e-47f7-a83c-44777d05ca3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1384.358638] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.423573] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: f33d00ec-72b7-43f2-bc0d-320e3219ae47] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1384.459907] env[61978]: DEBUG nova.compute.manager [req-6836a8ce-e396-43fe-b212-4c3d33eb570b req-00069ef8-e48c-4599-8d4a-5f0d8f632165 service nova] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Received event network-vif-deleted-f8cf63ba-ee62-4a3a-85e0-87d88ff84665 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1384.485203] env[61978]: DEBUG nova.scheduler.client.report [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Updated inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf with generation 157 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1384.485514] 
env[61978]: DEBUG nova.compute.provider_tree [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Updating resource provider 44209228-3464-48ae-bc40-83eccd44b0cf generation from 157 to 158 during operation: update_inventory {{(pid=61978) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1384.485726] env[61978]: DEBUG nova.compute.provider_tree [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1384.569907] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c3d6fb3f-8d33-4f48-a74c-692608eb4ac9" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1384.570888] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Processing image c3d6fb3f-8d33-4f48-a74c-692608eb4ac9 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1384.570888] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c3d6fb3f-8d33-4f48-a74c-692608eb4ac9/c3d6fb3f-8d33-4f48-a74c-692608eb4ac9.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1384.570888] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c3d6fb3f-8d33-4f48-a74c-692608eb4ac9/c3d6fb3f-8d33-4f48-a74c-692608eb4ac9.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.570888] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1384.572011] env[61978]: DEBUG nova.compute.manager [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1384.574020] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a1222d90-7d79-4347-ac6a-4de03e1a71e3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.582716] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1384.582920] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1384.583644] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40d07a43-3053-4bfc-b733-ffc2fce1e9e0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.590085] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1384.590085] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]527a2ccd-5e7b-a33a-46f7-cfe0ccc52a71" [ 1384.590085] env[61978]: _type = "Task" [ 1384.590085] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.606321] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Preparing fetch location {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1384.606496] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Fetch image to [datastore2] OSTACK_IMG_03d438ca-622f-4579-8c73-19748cc08b28/OSTACK_IMG_03d438ca-622f-4579-8c73-19748cc08b28.vmdk {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1384.606775] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Downloading stream optimized image c3d6fb3f-8d33-4f48-a74c-692608eb4ac9 to [datastore2] OSTACK_IMG_03d438ca-622f-4579-8c73-19748cc08b28/OSTACK_IMG_03d438ca-622f-4579-8c73-19748cc08b28.vmdk on the data store datastore2 as vApp {{(pid=61978) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1384.607014] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Downloading image file data 
c3d6fb3f-8d33-4f48-a74c-692608eb4ac9 to the ESX as VM named 'OSTACK_IMG_03d438ca-622f-4579-8c73-19748cc08b28' {{(pid=61978) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1384.610933] env[61978]: DEBUG nova.virt.hardware [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1384.611221] env[61978]: DEBUG nova.virt.hardware [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1384.611421] env[61978]: DEBUG nova.virt.hardware [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1384.611668] env[61978]: DEBUG nova.virt.hardware [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1384.611861] env[61978]: DEBUG nova.virt.hardware [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1384.612067] env[61978]: DEBUG nova.virt.hardware [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1384.613137] env[61978]: DEBUG nova.virt.hardware [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1384.613137] env[61978]: DEBUG nova.virt.hardware [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1384.613137] env[61978]: DEBUG nova.virt.hardware [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1384.613137] env[61978]: DEBUG nova.virt.hardware [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1384.613291] env[61978]: DEBUG nova.virt.hardware [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1384.614326] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19308b8d-cb3a-470e-b999-ce054a296182 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.623523] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8f3c38-b093-469c-b1d8-fdba60dbb193 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.691122] env[61978]: DEBUG oslo_vmware.rw_handles [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1384.691122] env[61978]: value = "resgroup-9" [ 1384.691122] env[61978]: _type = "ResourcePool" [ 1384.691122] env[61978]: }. {{(pid=61978) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1384.691747] env[61978]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-dfa3fd4f-2aa1-42ec-8d49-23319c37f8ff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.712190] env[61978]: DEBUG oslo_vmware.rw_handles [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lease: (returnval){ [ 1384.712190] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e5475f-8c60-9a87-1ae2-abba31aa5331" [ 1384.712190] env[61978]: _type = "HttpNfcLease" [ 1384.712190] env[61978]: } obtained for vApp import into resource pool (val){ [ 1384.712190] env[61978]: value = "resgroup-9" [ 1384.712190] env[61978]: _type = "ResourcePool" [ 1384.712190] env[61978]: }. 
{{(pid=61978) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1384.712645] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the lease: (returnval){ [ 1384.712645] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e5475f-8c60-9a87-1ae2-abba31aa5331" [ 1384.712645] env[61978]: _type = "HttpNfcLease" [ 1384.712645] env[61978]: } to be ready. {{(pid=61978) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1384.718599] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1384.718599] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e5475f-8c60-9a87-1ae2-abba31aa5331" [ 1384.718599] env[61978]: _type = "HttpNfcLease" [ 1384.718599] env[61978]: } is initializing. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1384.842423] env[61978]: DEBUG oslo_concurrency.lockutils [req-ae862098-76a7-45bb-a426-d67a2d8ccb91 req-70f852ea-2412-4b19-934e-b7ee2760481d service nova] Releasing lock "refresh_cache-fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1384.927389] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 27713bbd-1234-44ae-8520-78d85baaae12] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1384.992236] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.442s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.995022] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.144s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.022780] env[61978]: INFO nova.scheduler.client.report [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Deleted allocations for instance 7823099f-efdf-46bf-85d7-69e105dfb02c [ 1385.085293] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7599c7d-25b9-4f9c-85c5-67e61d1317ce {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.097511] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03b52d9-6800-412b-9d07-d14292d066b8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.132642] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917b56df-69d6-484b-86ef-ac9fff73bfee {{(pid=61978) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.142899] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-364a136f-71f5-43a8-8c75-65ba6548067d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.157061] env[61978]: DEBUG nova.compute.provider_tree [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1385.185847] env[61978]: DEBUG nova.network.neutron [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Successfully updated port: 7e93119a-e6cd-401f-bb4a-c41a130cd596 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1385.221748] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1385.221748] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e5475f-8c60-9a87-1ae2-abba31aa5331" [ 1385.221748] env[61978]: _type = "HttpNfcLease" [ 1385.221748] env[61978]: } is ready. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1385.222155] env[61978]: DEBUG oslo_vmware.rw_handles [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1385.222155] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52e5475f-8c60-9a87-1ae2-abba31aa5331" [ 1385.222155] env[61978]: _type = "HttpNfcLease" [ 1385.222155] env[61978]: }. {{(pid=61978) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1385.222991] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3e1c03-8104-4a02-ba9c-5e3527b22508 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.230089] env[61978]: DEBUG oslo_vmware.rw_handles [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cad34f-6a15-127a-5503-baf5f425d8ca/disk-0.vmdk from lease info. {{(pid=61978) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1385.230335] env[61978]: DEBUG oslo_vmware.rw_handles [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Creating HTTP connection to write to file with size = 31664640 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cad34f-6a15-127a-5503-baf5f425d8ca/disk-0.vmdk. 
{{(pid=61978) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1385.297710] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e83b9eb7-7538-46e9-8cb2-e770fa8d1165 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.435821] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: e9b70b36-d0d8-430e-a5e7-588d3c75d7ff] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1385.531599] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0e020106-776f-4af7-9119-d611acaf2a3b tempest-AttachInterfacesTestJSON-408106683 tempest-AttachInterfacesTestJSON-408106683-project-member] Lock "7823099f-efdf-46bf-85d7-69e105dfb02c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.702s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.660955] env[61978]: DEBUG nova.scheduler.client.report [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1385.688389] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Acquiring lock "refresh_cache-ce71756d-7a11-46d4-a5dd-a5b720df83c6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1385.688578] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Acquired lock "refresh_cache-ce71756d-7a11-46d4-a5dd-a5b720df83c6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.688728] env[61978]: DEBUG nova.network.neutron [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1385.941766] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: b76dd94e-c14b-48d4-bb7f-020313412ca2] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1386.148196] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Acquiring lock 
"1ec56ce5-c580-4369-ac0a-59c0782ac570" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.148600] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Lock "1ec56ce5-c580-4369-ac0a-59c0782ac570" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.173143] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.178s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.173500] env[61978]: INFO nova.compute.manager [None req-b1905f3a-328b-4462-bf0b-d6220df9ae63 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] [instance: db960922-12b5-41e7-9de3-312136819bb0] Successfully reverted task state from rebuilding on failure for instance. [ 1386.188584] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.695s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.189053] env[61978]: DEBUG nova.objects.instance [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Lazy-loading 'resources' on Instance uuid db960922-12b5-41e7-9de3-312136819bb0 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1386.246120] env[61978]: DEBUG nova.network.neutron [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1386.445055] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 90a38dba-0dae-455a-8d02-44c2bb098fb5] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1386.491083] env[61978]: DEBUG nova.compute.manager [req-d5fecd96-6fd0-49aa-a0c1-c9aa0866c3ac req-5f1cacee-68a2-4d14-ae8f-4db717867fde service nova] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Received event network-vif-plugged-7e93119a-e6cd-401f-bb4a-c41a130cd596 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1386.491321] env[61978]: DEBUG oslo_concurrency.lockutils [req-d5fecd96-6fd0-49aa-a0c1-c9aa0866c3ac req-5f1cacee-68a2-4d14-ae8f-4db717867fde service nova] Acquiring lock "ce71756d-7a11-46d4-a5dd-a5b720df83c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.491958] env[61978]: DEBUG oslo_concurrency.lockutils [req-d5fecd96-6fd0-49aa-a0c1-c9aa0866c3ac req-5f1cacee-68a2-4d14-ae8f-4db717867fde service nova] Lock "ce71756d-7a11-46d4-a5dd-a5b720df83c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.492217] env[61978]: DEBUG oslo_concurrency.lockutils [req-d5fecd96-6fd0-49aa-a0c1-c9aa0866c3ac req-5f1cacee-68a2-4d14-ae8f-4db717867fde service nova] Lock "ce71756d-7a11-46d4-a5dd-a5b720df83c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.492446] env[61978]: DEBUG nova.compute.manager [req-d5fecd96-6fd0-49aa-a0c1-c9aa0866c3ac req-5f1cacee-68a2-4d14-ae8f-4db717867fde service nova] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] No waiting events found dispatching network-vif-plugged-7e93119a-e6cd-401f-bb4a-c41a130cd596 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1386.492637] env[61978]: WARNING nova.compute.manager [req-d5fecd96-6fd0-49aa-a0c1-c9aa0866c3ac req-5f1cacee-68a2-4d14-ae8f-4db717867fde service nova] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Received unexpected event network-vif-plugged-7e93119a-e6cd-401f-bb4a-c41a130cd596 for instance with vm_state building and task_state spawning. [ 1386.492808] env[61978]: DEBUG nova.compute.manager [req-d5fecd96-6fd0-49aa-a0c1-c9aa0866c3ac req-5f1cacee-68a2-4d14-ae8f-4db717867fde service nova] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Received event network-changed-7e93119a-e6cd-401f-bb4a-c41a130cd596 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1386.492973] env[61978]: DEBUG nova.compute.manager [req-d5fecd96-6fd0-49aa-a0c1-c9aa0866c3ac req-5f1cacee-68a2-4d14-ae8f-4db717867fde service nova] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Refreshing instance network info cache due to event network-changed-7e93119a-e6cd-401f-bb4a-c41a130cd596. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1386.493239] env[61978]: DEBUG oslo_concurrency.lockutils [req-d5fecd96-6fd0-49aa-a0c1-c9aa0866c3ac req-5f1cacee-68a2-4d14-ae8f-4db717867fde service nova] Acquiring lock "refresh_cache-ce71756d-7a11-46d4-a5dd-a5b720df83c6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1386.509141] env[61978]: DEBUG nova.network.neutron [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Updating instance_info_cache with network_info: [{"id": "7e93119a-e6cd-401f-bb4a-c41a130cd596", "address": "fa:16:3e:92:0f:ab", "network": {"id": "4d7f890c-f557-4a16-8ebd-6d2a5184b9fd", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1332266824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1734dd57233346c2b3214f5424b55c64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e93119a-e6", "ovs_interfaceid": "7e93119a-e6cd-401f-bb4a-c41a130cd596", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1386.532548] env[61978]: DEBUG oslo_vmware.rw_handles [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Completed reading data from the image iterator. {{(pid=61978) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1386.532851] env[61978]: DEBUG oslo_vmware.rw_handles [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cad34f-6a15-127a-5503-baf5f425d8ca/disk-0.vmdk. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1386.534463] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55eccd0e-93c4-4993-8450-55a20701a2d1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.540959] env[61978]: DEBUG oslo_vmware.rw_handles [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cad34f-6a15-127a-5503-baf5f425d8ca/disk-0.vmdk is in state: ready. 
{{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1386.541167] env[61978]: DEBUG oslo_vmware.rw_handles [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cad34f-6a15-127a-5503-baf5f425d8ca/disk-0.vmdk. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1386.541876] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-b6ecb7b7-de41-4119-99a5-f0a4de555d70 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.651514] env[61978]: DEBUG nova.compute.manager [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1386.753208] env[61978]: DEBUG oslo_vmware.rw_handles [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cad34f-6a15-127a-5503-baf5f425d8ca/disk-0.vmdk. {{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1386.753538] env[61978]: INFO nova.virt.vmwareapi.images [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Downloaded image file data c3d6fb3f-8d33-4f48-a74c-692608eb4ac9 [ 1386.754340] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a3db6d-dc20-495c-9c62-8081b911b08a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.776782] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17e2ea9f-f8c0-45d5-8c4c-dbe773cd0d09 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.800935] env[61978]: INFO nova.virt.vmwareapi.images [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] The imported VM was unregistered [ 1386.803912] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Caching image {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1386.804190] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Creating directory with path [datastore2] devstack-image-cache_base/c3d6fb3f-8d33-4f48-a74c-692608eb4ac9 {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1386.804478] env[61978]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.MakeDirectory with opID=oslo.vmware-a2b3e399-838a-4b47-aa9b-22ed169e92a5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.828211] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Created directory with path [datastore2] devstack-image-cache_base/c3d6fb3f-8d33-4f48-a74c-692608eb4ac9 {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1386.828434] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_03d438ca-622f-4579-8c73-19748cc08b28/OSTACK_IMG_03d438ca-622f-4579-8c73-19748cc08b28.vmdk to [datastore2] devstack-image-cache_base/c3d6fb3f-8d33-4f48-a74c-692608eb4ac9/c3d6fb3f-8d33-4f48-a74c-692608eb4ac9.vmdk. {{(pid=61978) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1386.829945] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a06b5985-24bb-4012-b0d6-56de44cdb18e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.833017] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-d6240b0d-e701-4e4a-b60c-f4ab2186f394 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.843238] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-008a777e-5c12-49f2-bc8a-1c02bbf1ef4c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.843238] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1386.843238] env[61978]: value = "task-1396054" [ 1386.843238] env[61978]: _type = "Task" [ 1386.843238] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.874093] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a1aedc-c769-4ce4-9307-196f38b05b5d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.880516] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396054, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.886291] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b402d27-3df4-4d93-99b2-e906854b2613 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.900108] env[61978]: DEBUG nova.compute.provider_tree [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1386.948671] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 97e128f9-7135-46b0-b22a-ee5449ba48b6] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1387.011428] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Releasing lock "refresh_cache-ce71756d-7a11-46d4-a5dd-a5b720df83c6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1387.011934] env[61978]: DEBUG nova.compute.manager [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Instance network_info: |[{"id": "7e93119a-e6cd-401f-bb4a-c41a130cd596", "address": "fa:16:3e:92:0f:ab", "network": {"id": "4d7f890c-f557-4a16-8ebd-6d2a5184b9fd", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1332266824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1734dd57233346c2b3214f5424b55c64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e93119a-e6", "ovs_interfaceid": "7e93119a-e6cd-401f-bb4a-c41a130cd596", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1387.012421] env[61978]: DEBUG oslo_concurrency.lockutils [req-d5fecd96-6fd0-49aa-a0c1-c9aa0866c3ac req-5f1cacee-68a2-4d14-ae8f-4db717867fde service nova] Acquired lock "refresh_cache-ce71756d-7a11-46d4-a5dd-a5b720df83c6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.012765] env[61978]: DEBUG nova.network.neutron [req-d5fecd96-6fd0-49aa-a0c1-c9aa0866c3ac req-5f1cacee-68a2-4d14-ae8f-4db717867fde service nova] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Refreshing network info cache for port 
7e93119a-e6cd-401f-bb4a-c41a130cd596 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1387.014331] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:0f:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0685bd0b-3dbf-4a06-951c-c6a4726dd4b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e93119a-e6cd-401f-bb4a-c41a130cd596', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1387.023120] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Creating folder: Project (1734dd57233346c2b3214f5424b55c64). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1387.024209] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b80af285-bb0f-435e-9e9d-c29b44fae2d5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.038199] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Created folder: Project (1734dd57233346c2b3214f5424b55c64) in parent group-v295764. [ 1387.038199] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Creating folder: Instances. Parent ref: group-v296054. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1387.038199] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6b35a6c8-d979-4832-9dda-d9479b354461 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.048524] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Created folder: Instances in parent group-v296054. [ 1387.048799] env[61978]: DEBUG oslo.service.loopingcall [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1387.049008] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1387.049282] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0bb45f54-42cc-47c4-9e0b-bb5e04ddb85c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.072976] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1387.072976] env[61978]: value = "task-1396057" [ 1387.072976] env[61978]: _type = "Task" [ 1387.072976] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.079869] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396057, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.172921] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1387.354874] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396054, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.403153] env[61978]: DEBUG nova.scheduler.client.report [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1387.454597] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 68791dff-12e0-499d-8835-1e9173af570f] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1387.585862] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396057, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.855123] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396054, 'name': MoveVirtualDisk_Task} progress is 40%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.909449] env[61978]: DEBUG nova.network.neutron [req-d5fecd96-6fd0-49aa-a0c1-c9aa0866c3ac req-5f1cacee-68a2-4d14-ae8f-4db717867fde service nova] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Updated VIF entry in instance network info cache for port 7e93119a-e6cd-401f-bb4a-c41a130cd596. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1387.909921] env[61978]: DEBUG nova.network.neutron [req-d5fecd96-6fd0-49aa-a0c1-c9aa0866c3ac req-5f1cacee-68a2-4d14-ae8f-4db717867fde service nova] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Updating instance_info_cache with network_info: [{"id": "7e93119a-e6cd-401f-bb4a-c41a130cd596", "address": "fa:16:3e:92:0f:ab", "network": {"id": "4d7f890c-f557-4a16-8ebd-6d2a5184b9fd", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1332266824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1734dd57233346c2b3214f5424b55c64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e93119a-e6", "ovs_interfaceid": "7e93119a-e6cd-401f-bb4a-c41a130cd596", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1387.911813] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.723s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1387.914626] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.556s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1387.914761] env[61978]: DEBUG nova.objects.instance [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lazy-loading 'resources' on Instance uuid ac1676dd-affa-49cd-9e7b-a301abcec232 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1387.958219] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: c0be687a-7444-4019-8b12-dac41a7c080e] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1388.087357] env[61978]: DEBUG oslo_vmware.api 
[-] Task: {'id': task-1396057, 'name': CreateVM_Task, 'duration_secs': 0.826149} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.087357] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1388.087357] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1388.087357] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1388.087357] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1388.087357] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-791e635c-9d0c-4d9e-9b1e-1f62ff4e0d96 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.091455] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Waiting for the task: (returnval){ [ 1388.091455] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]524959d6-4f5a-1a37-eec7-6eb45ce0e16d" [ 1388.091455] env[61978]: _type = "Task" [ 1388.091455] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.099873] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]524959d6-4f5a-1a37-eec7-6eb45ce0e16d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.355200] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396054, 'name': MoveVirtualDisk_Task} progress is 63%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.415458] env[61978]: DEBUG oslo_concurrency.lockutils [req-d5fecd96-6fd0-49aa-a0c1-c9aa0866c3ac req-5f1cacee-68a2-4d14-ae8f-4db717867fde service nova] Releasing lock "refresh_cache-ce71756d-7a11-46d4-a5dd-a5b720df83c6" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.432615] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9c78388b-8a2e-4369-bbaa-c1eb29800fa1 tempest-ServerActionsV293TestJSON-1939665796 tempest-ServerActionsV293TestJSON-1939665796-project-member] Lock "db960922-12b5-41e7-9de3-312136819bb0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.264s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1388.461613] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 758b16e4-bb97-4fd9-bed2-54ef7fdfa4d9] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1388.497212] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a8d198-616a-42c3-b53c-5cd33281bfa5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.505693] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe3a93d-a2b7-4bfd-8085-b53f0b514b7d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.536550] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f01aef-53e0-4b37-88fe-3e810983e278 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.544215] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445bcf90-7dc4-421b-9705-f7d69b148392 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.559896] env[61978]: DEBUG nova.compute.provider_tree [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1388.603494] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]524959d6-4f5a-1a37-eec7-6eb45ce0e16d, 'name': SearchDatastore_Task, 'duration_secs': 0.086015} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.603494] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.603494] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1388.603741] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1388.603774] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1388.603972] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1388.604264] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5bb7469b-9696-4504-8215-ff0d8abf0a70 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.618296] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1388.618518] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1388.619401] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a99d30ac-478d-4c3b-917e-b270d12d1365 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.624757] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Waiting for the task: (returnval){ [ 1388.624757] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a3438c-7e20-8048-86d5-67f08494793a" [ 1388.624757] env[61978]: _type = "Task" [ 1388.624757] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.634488] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a3438c-7e20-8048-86d5-67f08494793a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.857831] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396054, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.965161] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 5d1d19d8-241b-41b8-b1c0-caf54f8fd600] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1389.063049] env[61978]: DEBUG nova.scheduler.client.report [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1389.135073] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a3438c-7e20-8048-86d5-67f08494793a, 'name': SearchDatastore_Task, 'duration_secs': 0.080717} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.136040] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00cea6d0-31a8-4cc9-90a9-fe710eea4cf2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.142736] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Waiting for the task: (returnval){ [ 1389.142736] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52259302-abcb-1a1a-5b6d-256cbdef9a95" [ 1389.142736] env[61978]: _type = "Task" [ 1389.142736] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.153724] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52259302-abcb-1a1a-5b6d-256cbdef9a95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.356369] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396054, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.399134} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.356603] env[61978]: INFO nova.virt.vmwareapi.ds_util [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_03d438ca-622f-4579-8c73-19748cc08b28/OSTACK_IMG_03d438ca-622f-4579-8c73-19748cc08b28.vmdk to [datastore2] devstack-image-cache_base/c3d6fb3f-8d33-4f48-a74c-692608eb4ac9/c3d6fb3f-8d33-4f48-a74c-692608eb4ac9.vmdk. 
[ 1389.356803] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Cleaning up location [datastore2] OSTACK_IMG_03d438ca-622f-4579-8c73-19748cc08b28 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1389.356974] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_03d438ca-622f-4579-8c73-19748cc08b28 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1389.357234] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-12190920-a693-4726-8608-829d14e9f8d9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.363034] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1389.363034] env[61978]: value = "task-1396058" [ 1389.363034] env[61978]: _type = "Task" [ 1389.363034] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.369932] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396058, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.468753] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: b9ecba01-6306-4bc4-aafa-a8d0d8f79ec2] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1389.567559] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.653s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1389.569966] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.397s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1389.571441] env[61978]: INFO nova.compute.claims [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1389.586815] env[61978]: INFO nova.scheduler.client.report [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Deleted allocations for instance ac1676dd-affa-49cd-9e7b-a301abcec232 [ 1389.653341] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52259302-abcb-1a1a-5b6d-256cbdef9a95, 'name': SearchDatastore_Task, 'duration_secs': 0.042458} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.653625] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1389.653901] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] ce71756d-7a11-46d4-a5dd-a5b720df83c6/ce71756d-7a11-46d4-a5dd-a5b720df83c6.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1389.654252] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2eabe25c-61e0-4da1-94cd-cd82cf7f8009 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.660850] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Waiting for the task: (returnval){ [ 1389.660850] env[61978]: value = "task-1396059" [ 1389.660850] env[61978]: _type = "Task" [ 1389.660850] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.668959] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': task-1396059, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.875032] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396058, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.035281} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.875294] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1389.875521] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c3d6fb3f-8d33-4f48-a74c-692608eb4ac9/c3d6fb3f-8d33-4f48-a74c-692608eb4ac9.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1389.877056] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c3d6fb3f-8d33-4f48-a74c-692608eb4ac9/c3d6fb3f-8d33-4f48-a74c-692608eb4ac9.vmdk to [datastore2] fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd/fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1389.877056] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d483086a-9401-4883-86d6-de6b3a8feac1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.884017] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1389.884017] env[61978]: value = "task-1396060" [ 1389.884017] env[61978]: _type = "Task" [ 1389.884017] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.893753] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396060, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.972687] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 612aba6c-a30d-4eeb-8f85-e791bda55582] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1390.094859] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0bf5ab30-6074-497c-9587-746dc8e912cd tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "ac1676dd-affa-49cd-9e7b-a301abcec232" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.906s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.170134] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': task-1396059, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.452137} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.170371] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] ce71756d-7a11-46d4-a5dd-a5b720df83c6/ce71756d-7a11-46d4-a5dd-a5b720df83c6.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1390.170593] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1390.170851] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d54c246b-e490-4d54-8c9b-a0735b74ea41 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.176940] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Waiting for the task: (returnval){ [ 1390.176940] env[61978]: value = "task-1396061" [ 1390.176940] env[61978]: _type = "Task" [ 1390.176940] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.183966] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': task-1396061, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.393788] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396060, 'name': CopyVirtualDisk_Task} progress is 15%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.476748] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: d3c82821-0617-4de6-8109-813a67910ed1] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1390.635936] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82a4b39-96a3-4a9d-bcab-a011f2aa1aba {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.645230] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7c9e14-aad2-4e3e-8511-a2d2d67e8b2d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.677447] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa47edc-3786-4496-9bbd-9d8fea9913db {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.692196] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cebe0e2-8f8e-467d-8bef-5e05228c8de1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.696702] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': task-1396061, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065456} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.697151] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1390.698419] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f9eaa76-167f-4a47-bf53-75b1c2439eda {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.710517] env[61978]: DEBUG nova.compute.provider_tree [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1390.732243] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] ce71756d-7a11-46d4-a5dd-a5b720df83c6/ce71756d-7a11-46d4-a5dd-a5b720df83c6.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1390.733414] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-358df978-6c0f-4495-aa85-51697ec7cafb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.754065] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Waiting for the task: (returnval){ [ 1390.754065] env[61978]: value = "task-1396062" [ 1390.754065] env[61978]: _type = "Task" [ 1390.754065] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.762391] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': task-1396062, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.895542] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396060, 'name': CopyVirtualDisk_Task} progress is 38%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.980424] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 81f0b79c-97b3-4a5d-a8fc-7c2250571177] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1391.230541] env[61978]: ERROR nova.scheduler.client.report [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [req-ed8820ab-249f-4146-9002-c5de69503aee] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 44209228-3464-48ae-bc40-83eccd44b0cf. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ed8820ab-249f-4146-9002-c5de69503aee"}]} [ 1391.247400] env[61978]: DEBUG nova.scheduler.client.report [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Refreshing inventories for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1391.263646] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': task-1396062, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.264782] env[61978]: DEBUG nova.scheduler.client.report [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Updating ProviderTree inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1391.264994] env[61978]: DEBUG nova.compute.provider_tree [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1391.277602] env[61978]: DEBUG nova.scheduler.client.report [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Refreshing aggregate associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, aggregates: None {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1391.295866] env[61978]: DEBUG nova.scheduler.client.report [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Refreshing trait associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1391.364087] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235143ee-28e5-4280-aa19-b02e47566349 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.372140] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-888bbcf6-d2d0-47c2-97f7-20581836ccfc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.405934] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ccabe0-22fe-43b1-b5ce-5861b236d0ff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.413087] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 
tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396060, 'name': CopyVirtualDisk_Task} progress is 60%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.416246] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad9af487-f4d0-413a-b165-00369bae03d4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.431914] env[61978]: DEBUG nova.compute.provider_tree [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1391.483896] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 7d388d5c-2120-4dc5-a04f-5394e1e6f852] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1391.689538] env[61978]: DEBUG oslo_concurrency.lockutils [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "30d6cc11-0258-47aa-b083-7c103c91acf2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1391.689914] env[61978]: DEBUG oslo_concurrency.lockutils [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "30d6cc11-0258-47aa-b083-7c103c91acf2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.766373] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': task-1396062, 'name': ReconfigVM_Task, 'duration_secs': 0.923499} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.766613] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Reconfigured VM instance instance-0000006d to attach disk [datastore2] ce71756d-7a11-46d4-a5dd-a5b720df83c6/ce71756d-7a11-46d4-a5dd-a5b720df83c6.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1391.767289] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-57672539-cd4a-4bb4-9c86-3230e961aaa4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.775785] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Waiting for the task: (returnval){ [ 1391.775785] env[61978]: value = "task-1396063" [ 1391.775785] env[61978]: _type = "Task" [ 1391.775785] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.786037] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': task-1396063, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.913175] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396060, 'name': CopyVirtualDisk_Task} progress is 83%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.964900] env[61978]: DEBUG nova.scheduler.client.report [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Updated inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf with generation 160 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1391.965218] env[61978]: DEBUG nova.compute.provider_tree [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Updating resource provider 44209228-3464-48ae-bc40-83eccd44b0cf generation from 160 to 161 during operation: update_inventory {{(pid=61978) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1391.965406] env[61978]: DEBUG nova.compute.provider_tree [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1391.987322] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 1eae10e8-58b1-435d-86fc-0674725ce6cd] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1392.193111] env[61978]: DEBUG nova.compute.manager [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1392.284920] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': task-1396063, 'name': Rename_Task, 'duration_secs': 0.358958} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.285217] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1392.285462] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd80ba0f-2d7c-4505-9a29-bb3c4391de38 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.291160] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Waiting for the task: (returnval){ [ 1392.291160] env[61978]: value = "task-1396064" [ 1392.291160] env[61978]: _type = "Task" [ 1392.291160] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.299830] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': task-1396064, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.412437] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396060, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.386704} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.412689] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c3d6fb3f-8d33-4f48-a74c-692608eb4ac9/c3d6fb3f-8d33-4f48-a74c-692608eb4ac9.vmdk to [datastore2] fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd/fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1392.413512] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62399afa-ec4d-48ce-9ccc-5e132e13f3fd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.434375] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd/fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd.vmdk or device None with type streamOptimized {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1392.434604] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f48d296-a01b-4432-aa16-531a93e37c69 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.452190] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1392.452190] env[61978]: value = "task-1396065" [ 1392.452190] env[61978]: _type = "Task" [ 1392.452190] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.461327] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396065, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.471137] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.901s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1392.471662] env[61978]: DEBUG nova.compute.manager [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1392.490256] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 94665d8c-df88-4ad0-bb90-547ace2d6345] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1392.719959] env[61978]: DEBUG oslo_concurrency.lockutils [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.720268] env[61978]: DEBUG oslo_concurrency.lockutils [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.721905] env[61978]: INFO nova.compute.claims [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1392.802171] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': task-1396064, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.962196] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396065, 'name': ReconfigVM_Task, 'duration_secs': 0.285712} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.962538] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Reconfigured VM instance instance-00000066 to attach disk [datastore2] fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd/fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd.vmdk or device None with type streamOptimized {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1392.963169] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-65b1d726-95cd-4819-bfd5-c662797238dc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.970073] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1392.970073] env[61978]: value = "task-1396066" [ 1392.970073] env[61978]: _type = "Task" [ 1392.970073] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.976352] env[61978]: DEBUG nova.compute.utils [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1392.981234] env[61978]: DEBUG nova.compute.manager [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1392.981406] env[61978]: DEBUG nova.network.neutron [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1392.983079] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396066, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.995051] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: a0c5a13b-ac59-44f9-8cc8-dafb0a7c10d6] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1393.021848] env[61978]: DEBUG nova.policy [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3807584bc854eb2a2c6885b867ced7f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd8a5b19deec24b2f8dcbbbd6ed30deaa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1393.293189] env[61978]: DEBUG nova.network.neutron [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Successfully created port: 9a17bc0d-88b6-4fce-95ef-62a0eb499feb {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1393.305897] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': task-1396064, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.481257] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396066, 'name': Rename_Task, 'duration_secs': 0.150515} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.481585] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1393.481858] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8ccfd99c-e06b-4640-8619-cce32b300b95 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.483926] env[61978]: DEBUG nova.compute.manager [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1393.491833] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1393.491833] env[61978]: value = "task-1396067" [ 1393.491833] env[61978]: _type = "Task" [ 1393.491833] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.501759] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 38e4f039-20bc-4bed-b449-227bde070ed9] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1393.503860] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396067, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.794999] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69e421d-0b17-420f-bc2b-7214141ca2ce {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.807926] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e20c7d71-4fcf-44f2-8c03-8c938353efff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.810915] env[61978]: DEBUG oslo_vmware.api [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': task-1396064, 'name': PowerOnVM_Task, 'duration_secs': 1.23058} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.811190] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1393.811398] env[61978]: INFO nova.compute.manager [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Took 9.24 seconds to spawn the instance on the hypervisor. [ 1393.811617] env[61978]: DEBUG nova.compute.manager [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1393.812648] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d41c723-83c4-45d7-9089-0a82d9f3bedf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.839132] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85134ad6-b77c-4f85-b033-da4d8285c5a0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.850609] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33547856-e115-48b7-902a-0a629dabbdc1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.864738] env[61978]: DEBUG nova.compute.provider_tree [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1394.001663] env[61978]: DEBUG oslo_vmware.api [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396067, 'name': PowerOnVM_Task, 'duration_secs': 0.47285} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.001982] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1394.005181] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 35a6d3ec-8688-43c2-93c4-b23033aaf280] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1394.105867] env[61978]: DEBUG nova.compute.manager [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1394.106852] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7945228f-2b96-43d3-b5bb-e3168618f3d7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.353766] env[61978]: INFO nova.compute.manager [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Took 21.71 seconds to build instance. [ 1394.367433] env[61978]: DEBUG nova.scheduler.client.report [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1394.493255] env[61978]: DEBUG nova.compute.manager [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1394.508257] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 1f6d930a-6a27-4f5f-ae7a-6fa8e5aa609d] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1394.523298] env[61978]: DEBUG nova.virt.hardware [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1394.523564] env[61978]: DEBUG nova.virt.hardware [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1394.523730] env[61978]: DEBUG nova.virt.hardware [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1394.523925] env[61978]: DEBUG nova.virt.hardware [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1394.524089] env[61978]: DEBUG nova.virt.hardware [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1394.524245] env[61978]: DEBUG nova.virt.hardware [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1394.524459] env[61978]: DEBUG nova.virt.hardware [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1394.524624] env[61978]: DEBUG nova.virt.hardware [None 
req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1394.524955] env[61978]: DEBUG nova.virt.hardware [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1394.525187] env[61978]: DEBUG nova.virt.hardware [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1394.525373] env[61978]: DEBUG nova.virt.hardware [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1394.526424] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06746aaf-9ce4-4ad3-860a-6906a7d11f09 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.535480] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6baa031-6285-411f-aef7-ceca69b0438f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.623766] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ad1520b3-635a-4d01-9857-eb476ab9cbb9 tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 24.452s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.685572] env[61978]: DEBUG nova.compute.manager [req-82fa188a-8b0c-48a3-a66c-5a41f25be59b req-f76bea25-bbe6-4f19-8fa8-fcb14a624404 service nova] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Received event network-vif-plugged-9a17bc0d-88b6-4fce-95ef-62a0eb499feb {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1394.685800] env[61978]: DEBUG oslo_concurrency.lockutils [req-82fa188a-8b0c-48a3-a66c-5a41f25be59b req-f76bea25-bbe6-4f19-8fa8-fcb14a624404 service nova] Acquiring lock "1ec56ce5-c580-4369-ac0a-59c0782ac570-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.686037] env[61978]: DEBUG oslo_concurrency.lockutils [req-82fa188a-8b0c-48a3-a66c-5a41f25be59b req-f76bea25-bbe6-4f19-8fa8-fcb14a624404 service nova] Lock "1ec56ce5-c580-4369-ac0a-59c0782ac570-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.686216] env[61978]: DEBUG oslo_concurrency.lockutils 
[req-82fa188a-8b0c-48a3-a66c-5a41f25be59b req-f76bea25-bbe6-4f19-8fa8-fcb14a624404 service nova] Lock "1ec56ce5-c580-4369-ac0a-59c0782ac570-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.686391] env[61978]: DEBUG nova.compute.manager [req-82fa188a-8b0c-48a3-a66c-5a41f25be59b req-f76bea25-bbe6-4f19-8fa8-fcb14a624404 service nova] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] No waiting events found dispatching network-vif-plugged-9a17bc0d-88b6-4fce-95ef-62a0eb499feb {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1394.686558] env[61978]: WARNING nova.compute.manager [req-82fa188a-8b0c-48a3-a66c-5a41f25be59b req-f76bea25-bbe6-4f19-8fa8-fcb14a624404 service nova] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Received unexpected event network-vif-plugged-9a17bc0d-88b6-4fce-95ef-62a0eb499feb for instance with vm_state building and task_state spawning. [ 1394.768848] env[61978]: DEBUG nova.network.neutron [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Successfully updated port: 9a17bc0d-88b6-4fce-95ef-62a0eb499feb {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1394.840726] env[61978]: DEBUG oslo_concurrency.lockutils [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Acquiring lock "ce71756d-7a11-46d4-a5dd-a5b720df83c6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.858402] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a31b5782-0a59-4993-8033-afe805b8aa7b tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Lock "ce71756d-7a11-46d4-a5dd-a5b720df83c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.224s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.858402] env[61978]: DEBUG oslo_concurrency.lockutils [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Lock "ce71756d-7a11-46d4-a5dd-a5b720df83c6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.016s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.858402] env[61978]: DEBUG oslo_concurrency.lockutils [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Acquiring lock "ce71756d-7a11-46d4-a5dd-a5b720df83c6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.858402] env[61978]: DEBUG oslo_concurrency.lockutils [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Lock 
"ce71756d-7a11-46d4-a5dd-a5b720df83c6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.858402] env[61978]: DEBUG oslo_concurrency.lockutils [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Lock "ce71756d-7a11-46d4-a5dd-a5b720df83c6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.861180] env[61978]: INFO nova.compute.manager [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Terminating instance [ 1394.864039] env[61978]: DEBUG nova.compute.manager [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1394.864372] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1394.865746] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e69121c-a576-40df-9516-3744c2ded169 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.872389] env[61978]: DEBUG oslo_concurrency.lockutils [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.152s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.873290] env[61978]: DEBUG nova.compute.manager [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1394.880500] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1394.881108] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8803dc1c-eb10-4477-9cea-7863782741da {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.891444] env[61978]: DEBUG oslo_vmware.api [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Waiting for the task: (returnval){ [ 1394.891444] env[61978]: value = "task-1396068" [ 1394.891444] env[61978]: _type = "Task" [ 1394.891444] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.902450] env[61978]: DEBUG oslo_vmware.api [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': task-1396068, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.012156] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: de0f46af-870a-4095-a417-913a2c51f66b] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1395.272039] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Acquiring lock "refresh_cache-1ec56ce5-c580-4369-ac0a-59c0782ac570" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1395.272207] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Acquired lock "refresh_cache-1ec56ce5-c580-4369-ac0a-59c0782ac570" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.272365] env[61978]: DEBUG nova.network.neutron [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1395.382352] env[61978]: DEBUG nova.compute.utils [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1395.383734] env[61978]: DEBUG nova.compute.manager [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Allocating IP information in the 
background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1395.383910] env[61978]: DEBUG nova.network.neutron [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1395.402058] env[61978]: DEBUG oslo_vmware.api [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': task-1396068, 'name': PowerOffVM_Task, 'duration_secs': 0.195102} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.402190] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1395.402268] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1395.402504] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b3116b0-c9b2-4821-afc8-1d0e325bfe3e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.421921] env[61978]: DEBUG nova.policy [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a208cee3d9c4efb8240ad943b55e915', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86ad52b551104a2594f1dbbc287f9efa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1395.464589] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1395.464875] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1395.465156] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Deleting the 
datastore file [datastore2] ce71756d-7a11-46d4-a5dd-a5b720df83c6 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1395.465469] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-84de39d9-fd97-4562-ad19-20c3b1ef61df {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.472653] env[61978]: DEBUG oslo_vmware.api [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Waiting for the task: (returnval){ [ 1395.472653] env[61978]: value = "task-1396070" [ 1395.472653] env[61978]: _type = "Task" [ 1395.472653] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.480989] env[61978]: DEBUG oslo_vmware.api [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': task-1396070, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.515808] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: ae6b92bb-6f79-4b52-bdb7-095985bf2fad] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1395.570362] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.570764] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.570995] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.571208] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.571413] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 
tempest-ServerActionsTestOtherB-375006338-project-member] Lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1395.573436] env[61978]: INFO nova.compute.manager [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Terminating instance [ 1395.575229] env[61978]: DEBUG nova.compute.manager [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1395.575430] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1395.576260] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fec00be-4725-45b3-9dde-4afcc0d6dab1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.585210] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1395.585480] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c128732-c6a9-4a6d-9abb-c3d449b64411 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.592154] env[61978]: DEBUG oslo_vmware.api [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1395.592154] env[61978]: value = "task-1396071" [ 1395.592154] env[61978]: _type = "Task" [ 1395.592154] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.599693] env[61978]: DEBUG oslo_vmware.api [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396071, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.688927] env[61978]: DEBUG nova.network.neutron [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Successfully created port: e1688d68-33f0-48a2-8d22-475b9f9bacfb {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1395.818619] env[61978]: DEBUG nova.network.neutron [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1395.886918] env[61978]: DEBUG nova.compute.manager [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1395.981366] env[61978]: DEBUG oslo_vmware.api [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Task: {'id': task-1396070, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151027} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.981626] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1395.981816] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1395.982009] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1395.982201] env[61978]: INFO nova.compute.manager [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1395.982503] env[61978]: DEBUG oslo.service.loopingcall [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1395.982654] env[61978]: DEBUG nova.compute.manager [-] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1395.982748] env[61978]: DEBUG nova.network.neutron [-] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1396.019226] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: c861eaa2-1c57-476f-92b3-886c8e44f6b4] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1396.042866] env[61978]: DEBUG nova.network.neutron [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Updating instance_info_cache with network_info: [{"id": "9a17bc0d-88b6-4fce-95ef-62a0eb499feb", "address": "fa:16:3e:28:55:ae", "network": {"id": "7de5cb4e-63ba-4de9-94dc-84f8ead2ebd6", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1885745338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d8a5b19deec24b2f8dcbbbd6ed30deaa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c47e98ff-83cf-48d2-bf91-2931c7386b6a", "external-id": "nsx-vlan-transportzone-992", "segmentation_id": 992, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a17bc0d-88", "ovs_interfaceid": "9a17bc0d-88b6-4fce-95ef-62a0eb499feb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1396.101779] env[61978]: DEBUG oslo_vmware.api [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396071, 'name': PowerOffVM_Task, 'duration_secs': 0.193347} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.102076] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1396.102255] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1396.102577] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-64ccfa45-0242-4e71-ae6b-58853c1ee394 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.161722] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1396.162008] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1396.162216] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Deleting the datastore file [datastore2] fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1396.162579] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b46f8b89-22bf-48e7-a70e-477e0eb14c37 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.168441] env[61978]: DEBUG oslo_vmware.api [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for the task: (returnval){ [ 1396.168441] env[61978]: value = "task-1396073" [ 1396.168441] env[61978]: _type = "Task" [ 1396.168441] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.176360] env[61978]: DEBUG oslo_vmware.api [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396073, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.521929] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: cf6d8815-ed87-4629-9df9-6f406ac2fe6e] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1396.545273] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Releasing lock "refresh_cache-1ec56ce5-c580-4369-ac0a-59c0782ac570" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1396.545669] env[61978]: DEBUG nova.compute.manager [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Instance network_info: |[{"id": "9a17bc0d-88b6-4fce-95ef-62a0eb499feb", "address": "fa:16:3e:28:55:ae", "network": {"id": "7de5cb4e-63ba-4de9-94dc-84f8ead2ebd6", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1885745338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d8a5b19deec24b2f8dcbbbd6ed30deaa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c47e98ff-83cf-48d2-bf91-2931c7386b6a", "external-id": "nsx-vlan-transportzone-992", "segmentation_id": 992, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a17bc0d-88", "ovs_interfaceid": "9a17bc0d-88b6-4fce-95ef-62a0eb499feb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1396.546120] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:28:55:ae', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c47e98ff-83cf-48d2-bf91-2931c7386b6a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a17bc0d-88b6-4fce-95ef-62a0eb499feb', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1396.553599] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Creating folder: Project (d8a5b19deec24b2f8dcbbbd6ed30deaa). Parent ref: group-v295764. 
{{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1396.554173] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c69ec069-3343-49af-a504-ad53d14e308d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.566008] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Created folder: Project (d8a5b19deec24b2f8dcbbbd6ed30deaa) in parent group-v295764. [ 1396.566206] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Creating folder: Instances. Parent ref: group-v296057. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1396.566430] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-195f0a2f-2481-4da1-bb3b-6fe9c472148a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.574266] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Created folder: Instances in parent group-v296057. [ 1396.574486] env[61978]: DEBUG oslo.service.loopingcall [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1396.574662] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1396.574853] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5a16e5de-4de9-4135-897a-4801a1615f62 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.592766] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1396.592766] env[61978]: value = "task-1396076" [ 1396.592766] env[61978]: _type = "Task" [ 1396.592766] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.599820] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396076, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.678631] env[61978]: DEBUG oslo_vmware.api [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Task: {'id': task-1396073, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147564} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.678933] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1396.679149] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1396.679335] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1396.679514] env[61978]: INFO nova.compute.manager [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1396.679757] env[61978]: DEBUG oslo.service.loopingcall [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1396.679973] env[61978]: DEBUG nova.compute.manager [-] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1396.680104] env[61978]: DEBUG nova.network.neutron [-] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1396.715608] env[61978]: DEBUG nova.compute.manager [req-b91558c2-3968-40f8-ada6-e92a85114979 req-da2085d7-ad74-41d2-9ca2-df70016b68c1 service nova] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Received event network-changed-9a17bc0d-88b6-4fce-95ef-62a0eb499feb {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1396.715805] env[61978]: DEBUG nova.compute.manager [req-b91558c2-3968-40f8-ada6-e92a85114979 req-da2085d7-ad74-41d2-9ca2-df70016b68c1 service nova] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Refreshing instance network info cache due to event network-changed-9a17bc0d-88b6-4fce-95ef-62a0eb499feb. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1396.716058] env[61978]: DEBUG oslo_concurrency.lockutils [req-b91558c2-3968-40f8-ada6-e92a85114979 req-da2085d7-ad74-41d2-9ca2-df70016b68c1 service nova] Acquiring lock "refresh_cache-1ec56ce5-c580-4369-ac0a-59c0782ac570" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1396.716299] env[61978]: DEBUG oslo_concurrency.lockutils [req-b91558c2-3968-40f8-ada6-e92a85114979 req-da2085d7-ad74-41d2-9ca2-df70016b68c1 service nova] Acquired lock "refresh_cache-1ec56ce5-c580-4369-ac0a-59c0782ac570" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.716394] env[61978]: DEBUG nova.network.neutron [req-b91558c2-3968-40f8-ada6-e92a85114979 req-da2085d7-ad74-41d2-9ca2-df70016b68c1 service nova] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Refreshing network info cache for port 9a17bc0d-88b6-4fce-95ef-62a0eb499feb {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1396.777804] env[61978]: DEBUG nova.network.neutron [-] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1396.896660] env[61978]: DEBUG nova.compute.manager [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1396.952324] env[61978]: DEBUG nova.virt.hardware [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1396.952324] env[61978]: DEBUG nova.virt.hardware [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1396.952324] env[61978]: DEBUG nova.virt.hardware [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1396.952324] env[61978]: DEBUG nova.virt.hardware [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d 
tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1396.952324] env[61978]: DEBUG nova.virt.hardware [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1396.952324] env[61978]: DEBUG nova.virt.hardware [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1396.952655] env[61978]: DEBUG nova.virt.hardware [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1396.952733] env[61978]: DEBUG nova.virt.hardware [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1396.952981] env[61978]: DEBUG nova.virt.hardware [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1396.953327] env[61978]: DEBUG nova.virt.hardware [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1396.953606] env[61978]: DEBUG nova.virt.hardware [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1396.954840] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ccafc5e-e829-4557-845d-9e6fda29573b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.965677] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87cbfcf7-f212-4389-a252-ddbec7f77584 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.025740] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 17c56c1c-9992-4559-ad23-c68909ae6792] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1397.102343] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396076, 'name': CreateVM_Task} 
progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.280442] env[61978]: INFO nova.compute.manager [-] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Took 1.30 seconds to deallocate network for instance. [ 1397.528957] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: fdd0c16d-b0f8-4f81-9069-34d11f273acb] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1397.602543] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396076, 'name': CreateVM_Task, 'duration_secs': 0.629229} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.602710] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1397.603368] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1397.603539] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.603877] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1397.604145] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3605e3e5-82fe-4abe-89cc-1322eef8ad84 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.608690] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Waiting for the task: (returnval){ [ 1397.608690] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5290e588-ff36-52ae-ce53-8bd19b8adbca" [ 1397.608690] env[61978]: _type = "Task" [ 1397.608690] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.615737] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5290e588-ff36-52ae-ce53-8bd19b8adbca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.643271] env[61978]: DEBUG nova.network.neutron [req-b91558c2-3968-40f8-ada6-e92a85114979 req-da2085d7-ad74-41d2-9ca2-df70016b68c1 service nova] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Updated VIF entry in instance network info cache for port 9a17bc0d-88b6-4fce-95ef-62a0eb499feb. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1397.644230] env[61978]: DEBUG nova.network.neutron [req-b91558c2-3968-40f8-ada6-e92a85114979 req-da2085d7-ad74-41d2-9ca2-df70016b68c1 service nova] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Updating instance_info_cache with network_info: [{"id": "9a17bc0d-88b6-4fce-95ef-62a0eb499feb", "address": "fa:16:3e:28:55:ae", "network": {"id": "7de5cb4e-63ba-4de9-94dc-84f8ead2ebd6", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1885745338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d8a5b19deec24b2f8dcbbbd6ed30deaa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c47e98ff-83cf-48d2-bf91-2931c7386b6a", "external-id": "nsx-vlan-transportzone-992", "segmentation_id": 992, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a17bc0d-88", "ovs_interfaceid": "9a17bc0d-88b6-4fce-95ef-62a0eb499feb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.656748] env[61978]: DEBUG nova.network.neutron [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Successfully updated port: e1688d68-33f0-48a2-8d22-475b9f9bacfb {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1397.786736] env[61978]: DEBUG oslo_concurrency.lockutils [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.787035] env[61978]: DEBUG oslo_concurrency.lockutils [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.787273] env[61978]: DEBUG nova.objects.instance [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Lazy-loading 'resources' on Instance uuid ce71756d-7a11-46d4-a5dd-a5b720df83c6 {{(pid=61978) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1398.032230] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: a1087abd-28d1-40ac-96ab-dc38392d027c] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1398.119271] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5290e588-ff36-52ae-ce53-8bd19b8adbca, 'name': SearchDatastore_Task, 'duration_secs': 0.009373} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.119590] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1398.119833] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1398.120083] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1398.120240] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1398.120425] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1398.120742] env[61978]: DEBUG nova.network.neutron [-] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.122040] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2315a438-be8e-43f9-947c-c1c8d2dfd348 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.130787] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 
tempest-ServerAddressesTestJSON-1658196044-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1398.131045] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1398.131775] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0169f11-0113-4282-9c40-e59a254f18a0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.138565] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Waiting for the task: (returnval){ [ 1398.138565] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]520140d8-7648-026d-a519-3f29afc14c1f" [ 1398.138565] env[61978]: _type = "Task" [ 1398.138565] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.146707] env[61978]: DEBUG oslo_concurrency.lockutils [req-b91558c2-3968-40f8-ada6-e92a85114979 req-da2085d7-ad74-41d2-9ca2-df70016b68c1 service nova] Releasing lock "refresh_cache-1ec56ce5-c580-4369-ac0a-59c0782ac570" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1398.146998] env[61978]: DEBUG nova.compute.manager [req-b91558c2-3968-40f8-ada6-e92a85114979 req-da2085d7-ad74-41d2-9ca2-df70016b68c1 service nova] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Received event network-vif-deleted-7e93119a-e6cd-401f-bb4a-c41a130cd596 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1398.147174] env[61978]: INFO nova.compute.manager [req-b91558c2-3968-40f8-ada6-e92a85114979 req-da2085d7-ad74-41d2-9ca2-df70016b68c1 service nova] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Neutron deleted interface 7e93119a-e6cd-401f-bb4a-c41a130cd596; detaching it from the instance and deleting it from the info cache [ 1398.147375] env[61978]: DEBUG nova.network.neutron [req-b91558c2-3968-40f8-ada6-e92a85114979 req-da2085d7-ad74-41d2-9ca2-df70016b68c1 service nova] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.148510] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520140d8-7648-026d-a519-3f29afc14c1f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.157913] env[61978]: DEBUG oslo_concurrency.lockutils [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "refresh_cache-30d6cc11-0258-47aa-b083-7c103c91acf2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1398.158071] env[61978]: DEBUG oslo_concurrency.lockutils [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "refresh_cache-30d6cc11-0258-47aa-b083-7c103c91acf2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1398.158201] env[61978]: DEBUG nova.network.neutron [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1398.349110] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b0f4bf-641f-4baf-8fdb-f2e84f8d33d0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.357768] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd3ccd9-2ec8-44be-b66d-c8e926da5217 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.399769] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a452cd0-11a1-4c08-84de-f80b1e5851a2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.407516] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b551fb-4043-4ba9-9ea3-ecf1b2ac450e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.420171] env[61978]: DEBUG nova.compute.provider_tree [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1398.535116] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: f4034944-3a9d-4e14-a545-0bf574465e0b] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1398.624608] env[61978]: INFO nova.compute.manager [-] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Took 1.94 seconds to deallocate network for instance. [ 1398.649581] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]520140d8-7648-026d-a519-3f29afc14c1f, 'name': SearchDatastore_Task, 'duration_secs': 0.008849} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.649841] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b7870c9d-05ac-4ac8-b955-a18543d9fcea {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.652253] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1c4a3d0-fc0c-4b40-a525-de7aee8062db {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.657678] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Waiting for the task: (returnval){ [ 1398.657678] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ac0b10-6a57-84ac-1758-e75d3bd91cc4" [ 1398.657678] env[61978]: _type = "Task" [ 1398.657678] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.663951] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad338a40-41f3-4cca-a9ef-f6dd3792d0da {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.681406] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ac0b10-6a57-84ac-1758-e75d3bd91cc4, 'name': SearchDatastore_Task, 'duration_secs': 0.008989} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.681644] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1398.681903] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 1ec56ce5-c580-4369-ac0a-59c0782ac570/1ec56ce5-c580-4369-ac0a-59c0782ac570.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1398.682149] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-21f845ad-2375-4f38-b583-755cbcfc66e9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.688554] env[61978]: DEBUG nova.compute.manager [req-b91558c2-3968-40f8-ada6-e92a85114979 req-da2085d7-ad74-41d2-9ca2-df70016b68c1 service nova] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Detach interface failed, port_id=7e93119a-e6cd-401f-bb4a-c41a130cd596, reason: Instance ce71756d-7a11-46d4-a5dd-a5b720df83c6 could not be found. 
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1398.692833] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Waiting for the task: (returnval){ [ 1398.692833] env[61978]: value = "task-1396077" [ 1398.692833] env[61978]: _type = "Task" [ 1398.692833] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.699917] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Task: {'id': task-1396077, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.704330] env[61978]: DEBUG nova.network.neutron [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1398.745467] env[61978]: DEBUG nova.compute.manager [req-474f425d-3edc-4677-a4f0-88c4b9eaa0af req-cd96f7d4-174c-4798-b9dc-258009394086 service nova] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Received event network-vif-plugged-e1688d68-33f0-48a2-8d22-475b9f9bacfb {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1398.746155] env[61978]: DEBUG oslo_concurrency.lockutils [req-474f425d-3edc-4677-a4f0-88c4b9eaa0af req-cd96f7d4-174c-4798-b9dc-258009394086 service nova] Acquiring lock "30d6cc11-0258-47aa-b083-7c103c91acf2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.746398] env[61978]: DEBUG oslo_concurrency.lockutils [req-474f425d-3edc-4677-a4f0-88c4b9eaa0af req-cd96f7d4-174c-4798-b9dc-258009394086 service nova] Lock "30d6cc11-0258-47aa-b083-7c103c91acf2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.746583] env[61978]: DEBUG oslo_concurrency.lockutils [req-474f425d-3edc-4677-a4f0-88c4b9eaa0af req-cd96f7d4-174c-4798-b9dc-258009394086 service nova] Lock "30d6cc11-0258-47aa-b083-7c103c91acf2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.746796] env[61978]: DEBUG nova.compute.manager [req-474f425d-3edc-4677-a4f0-88c4b9eaa0af req-cd96f7d4-174c-4798-b9dc-258009394086 service nova] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] No waiting events found dispatching network-vif-plugged-e1688d68-33f0-48a2-8d22-475b9f9bacfb {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1398.747022] env[61978]: WARNING nova.compute.manager [req-474f425d-3edc-4677-a4f0-88c4b9eaa0af req-cd96f7d4-174c-4798-b9dc-258009394086 service nova] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Received unexpected event network-vif-plugged-e1688d68-33f0-48a2-8d22-475b9f9bacfb for 
instance with vm_state building and task_state spawning. [ 1398.747154] env[61978]: DEBUG nova.compute.manager [req-474f425d-3edc-4677-a4f0-88c4b9eaa0af req-cd96f7d4-174c-4798-b9dc-258009394086 service nova] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Received event network-vif-deleted-2a16d335-2f9e-47f7-a83c-44777d05ca3b {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1398.747331] env[61978]: DEBUG nova.compute.manager [req-474f425d-3edc-4677-a4f0-88c4b9eaa0af req-cd96f7d4-174c-4798-b9dc-258009394086 service nova] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Received event network-changed-e1688d68-33f0-48a2-8d22-475b9f9bacfb {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1398.747492] env[61978]: DEBUG nova.compute.manager [req-474f425d-3edc-4677-a4f0-88c4b9eaa0af req-cd96f7d4-174c-4798-b9dc-258009394086 service nova] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Refreshing instance network info cache due to event network-changed-e1688d68-33f0-48a2-8d22-475b9f9bacfb. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1398.747757] env[61978]: DEBUG oslo_concurrency.lockutils [req-474f425d-3edc-4677-a4f0-88c4b9eaa0af req-cd96f7d4-174c-4798-b9dc-258009394086 service nova] Acquiring lock "refresh_cache-30d6cc11-0258-47aa-b083-7c103c91acf2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1398.858364] env[61978]: DEBUG nova.network.neutron [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Updating instance_info_cache with network_info: [{"id": "e1688d68-33f0-48a2-8d22-475b9f9bacfb", "address": "fa:16:3e:dd:fe:12", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1688d68-33", "ovs_interfaceid": "e1688d68-33f0-48a2-8d22-475b9f9bacfb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.923140] env[61978]: DEBUG nova.scheduler.client.report [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1399.038254] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: b932d221-aca9-4853-aa9c-2d27981e878c] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1399.131300] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.203435] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Task: {'id': task-1396077, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.45434} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.203702] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 1ec56ce5-c580-4369-ac0a-59c0782ac570/1ec56ce5-c580-4369-ac0a-59c0782ac570.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1399.203914] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1399.204184] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c9c86e63-6771-436b-a05e-83d434a524b0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.210027] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Waiting for the task: (returnval){ [ 1399.210027] env[61978]: value = "task-1396078" [ 1399.210027] env[61978]: _type = "Task" [ 1399.210027] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.217258] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Task: {'id': task-1396078, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.361019] env[61978]: DEBUG oslo_concurrency.lockutils [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "refresh_cache-30d6cc11-0258-47aa-b083-7c103c91acf2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1399.361432] env[61978]: DEBUG nova.compute.manager [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Instance network_info: |[{"id": "e1688d68-33f0-48a2-8d22-475b9f9bacfb", "address": "fa:16:3e:dd:fe:12", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1688d68-33", "ovs_interfaceid": "e1688d68-33f0-48a2-8d22-475b9f9bacfb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1399.361724] env[61978]: DEBUG oslo_concurrency.lockutils [req-474f425d-3edc-4677-a4f0-88c4b9eaa0af req-cd96f7d4-174c-4798-b9dc-258009394086 service nova] Acquired lock "refresh_cache-30d6cc11-0258-47aa-b083-7c103c91acf2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.361917] env[61978]: DEBUG nova.network.neutron [req-474f425d-3edc-4677-a4f0-88c4b9eaa0af req-cd96f7d4-174c-4798-b9dc-258009394086 service nova] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Refreshing network info cache for port e1688d68-33f0-48a2-8d22-475b9f9bacfb {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1399.363187] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:fe:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e1688d68-33f0-48a2-8d22-475b9f9bacfb', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1399.371319] env[61978]: DEBUG oslo.service.loopingcall [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] 
Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1399.374121] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1399.374586] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d75bdf0a-858d-4b1c-824c-85321fe95e45 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.394871] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1399.394871] env[61978]: value = "task-1396079" [ 1399.394871] env[61978]: _type = "Task" [ 1399.394871] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.401999] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396079, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.428661] env[61978]: DEBUG oslo_concurrency.lockutils [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.641s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.430945] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.300s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.431065] env[61978]: DEBUG nova.objects.instance [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lazy-loading 'resources' on Instance uuid fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1399.447525] env[61978]: INFO nova.scheduler.client.report [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Deleted allocations for instance ce71756d-7a11-46d4-a5dd-a5b720df83c6 [ 1399.541571] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 0cdff646-34ad-49d5-b775-28e8e7ce778e] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1399.586223] env[61978]: DEBUG nova.network.neutron [req-474f425d-3edc-4677-a4f0-88c4b9eaa0af req-cd96f7d4-174c-4798-b9dc-258009394086 service nova] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Updated VIF entry in instance network info cache for port e1688d68-33f0-48a2-8d22-475b9f9bacfb. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1399.586583] env[61978]: DEBUG nova.network.neutron [req-474f425d-3edc-4677-a4f0-88c4b9eaa0af req-cd96f7d4-174c-4798-b9dc-258009394086 service nova] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Updating instance_info_cache with network_info: [{"id": "e1688d68-33f0-48a2-8d22-475b9f9bacfb", "address": "fa:16:3e:dd:fe:12", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1688d68-33", "ovs_interfaceid": "e1688d68-33f0-48a2-8d22-475b9f9bacfb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1399.719881] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Task: {'id': task-1396078, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061681} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.720236] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1399.721011] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7368a20-5e1d-459d-ac3b-8a9fbe10db28 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.743130] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] 1ec56ce5-c580-4369-ac0a-59c0782ac570/1ec56ce5-c580-4369-ac0a-59c0782ac570.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1399.743447] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb18e2a1-c928-4143-936d-c58fbb3c0da2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.762386] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Waiting for the task: (returnval){ [ 1399.762386] env[61978]: value = "task-1396080" [ 1399.762386] env[61978]: _type = "Task" [ 1399.762386] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.769921] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Task: {'id': task-1396080, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.905611] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396079, 'name': CreateVM_Task, 'duration_secs': 0.345589} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.905805] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1399.906568] env[61978]: DEBUG oslo_concurrency.lockutils [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1399.906777] env[61978]: DEBUG oslo_concurrency.lockutils [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.907161] env[61978]: DEBUG oslo_concurrency.lockutils [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1399.907437] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9094d523-151a-4ea2-ba17-602de89523b5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.911916] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1399.911916] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c4965f-0320-1f72-09e1-e41d91d04795" [ 1399.911916] env[61978]: _type = "Task" [ 1399.911916] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.919906] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c4965f-0320-1f72-09e1-e41d91d04795, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.961038] env[61978]: DEBUG oslo_concurrency.lockutils [None req-95375fd2-f4fa-4217-bf49-ff3653b041a0 tempest-ServerPasswordTestJSON-1506613154 tempest-ServerPasswordTestJSON-1506613154-project-member] Lock "ce71756d-7a11-46d4-a5dd-a5b720df83c6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.104s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.980617] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55bfaf0b-0f53-40a2-91e6-1f15c3570899 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.990528] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c398a099-cd03-4ae4-a325-dab60b3f2b64 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.022466] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8277256c-7178-41d8-969d-1027ea424af6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.029435] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c44a3596-af58-4099-a695-5c4e7de9fca7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.042265] env[61978]: DEBUG nova.compute.provider_tree [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1400.044237] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 9ee04ee8-98ec-4be9-935d-cad7cd176466] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1400.088750] env[61978]: DEBUG oslo_concurrency.lockutils [req-474f425d-3edc-4677-a4f0-88c4b9eaa0af req-cd96f7d4-174c-4798-b9dc-258009394086 service nova] Releasing lock "refresh_cache-30d6cc11-0258-47aa-b083-7c103c91acf2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1400.273168] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Task: {'id': task-1396080, 'name': ReconfigVM_Task, 'duration_secs': 0.257474} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.273465] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Reconfigured VM instance instance-0000006e to attach disk [datastore2] 1ec56ce5-c580-4369-ac0a-59c0782ac570/1ec56ce5-c580-4369-ac0a-59c0782ac570.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1400.274105] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d1c67ae-cdf5-4e16-8545-57eb7a30fef0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.280871] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Waiting for the task: (returnval){ [ 1400.280871] env[61978]: value = "task-1396081" [ 1400.280871] env[61978]: _type = "Task" [ 1400.280871] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.288026] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Task: {'id': task-1396081, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.422500] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c4965f-0320-1f72-09e1-e41d91d04795, 'name': SearchDatastore_Task, 'duration_secs': 0.009301} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.422853] env[61978]: DEBUG oslo_concurrency.lockutils [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1400.423113] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1400.423361] env[61978]: DEBUG oslo_concurrency.lockutils [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.423516] env[61978]: DEBUG oslo_concurrency.lockutils [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.423699] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1400.423969] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec173417-a465-4521-8b5a-64b45e802903 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.431678] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1400.431858] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1400.432570] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dacd2bdb-81b1-408b-9bc4-c1cc565811ea {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.437764] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1400.437764] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]526ba13c-fa9f-f1bf-8d4c-ba8e0fb17a92" [ 1400.437764] env[61978]: _type = "Task" [ 1400.437764] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.445652] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]526ba13c-fa9f-f1bf-8d4c-ba8e0fb17a92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.548373] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 4c7053ee-7c44-49ee-8d30-bf14686c6b1c] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1400.575257] env[61978]: DEBUG nova.scheduler.client.report [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Updated inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf with generation 161 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1400.575544] env[61978]: DEBUG nova.compute.provider_tree [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Updating resource provider 44209228-3464-48ae-bc40-83eccd44b0cf generation from 161 to 162 during operation: update_inventory {{(pid=61978) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1400.576096] env[61978]: DEBUG nova.compute.provider_tree [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1400.791132] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Task: {'id': task-1396081, 'name': Rename_Task, 'duration_secs': 0.139118} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.791427] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1400.791684] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-873ba5d7-343a-46e0-91c2-ecb0002d0d49 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.798094] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Waiting for the task: (returnval){ [ 1400.798094] env[61978]: value = "task-1396082" [ 1400.798094] env[61978]: _type = "Task" [ 1400.798094] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.805686] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Task: {'id': task-1396082, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.948506] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]526ba13c-fa9f-f1bf-8d4c-ba8e0fb17a92, 'name': SearchDatastore_Task, 'duration_secs': 0.008216} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.949308] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0af3a8b-86d0-4d32-943b-5ca07afe30e1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.954611] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1400.954611] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]523f0de6-87aa-3eb5-afa4-a9ae440a5121" [ 1400.954611] env[61978]: _type = "Task" [ 1400.954611] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.962957] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523f0de6-87aa-3eb5-afa4-a9ae440a5121, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.051803] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: b356fc81-f857-4416-8eb0-28c66d137967] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1401.081418] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.650s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.104218] env[61978]: INFO nova.scheduler.client.report [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Deleted allocations for instance fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd [ 1401.308635] env[61978]: DEBUG oslo_vmware.api [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Task: {'id': task-1396082, 'name': PowerOnVM_Task, 'duration_secs': 0.435494} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.309092] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1401.309310] env[61978]: INFO nova.compute.manager [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Took 6.82 seconds to spawn the instance on the hypervisor. [ 1401.309498] env[61978]: DEBUG nova.compute.manager [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1401.310289] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789ce4a3-df86-466c-a3b4-9319709fc5ae {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.464707] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523f0de6-87aa-3eb5-afa4-a9ae440a5121, 'name': SearchDatastore_Task, 'duration_secs': 0.009173} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.465007] env[61978]: DEBUG oslo_concurrency.lockutils [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1401.465294] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 30d6cc11-0258-47aa-b083-7c103c91acf2/30d6cc11-0258-47aa-b083-7c103c91acf2.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1401.465576] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-358fa29a-3b4b-4d05-b256-2228de9ab5e1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.472741] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1401.472741] env[61978]: value = "task-1396083" [ 1401.472741] env[61978]: _type = "Task" [ 1401.472741] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.481122] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396083, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.555315] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 59f32dd0-1faa-4059-9ef3-b177e8f4fa4c] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1401.613469] env[61978]: DEBUG oslo_concurrency.lockutils [None req-fbaf7a20-8a91-4e7d-8770-bf373320058d tempest-ServerActionsTestOtherB-375006338 tempest-ServerActionsTestOtherB-375006338-project-member] Lock "fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.043s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.829776] env[61978]: INFO nova.compute.manager [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Took 14.67 seconds to build instance. [ 1401.982636] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396083, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.467889} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.982939] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 30d6cc11-0258-47aa-b083-7c103c91acf2/30d6cc11-0258-47aa-b083-7c103c91acf2.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1401.983218] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1401.983506] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0cc71a16-1693-4db8-8751-feca1f8b6239 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.989372] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1401.989372] env[61978]: value = "task-1396085" [ 1401.989372] env[61978]: _type = "Task" [ 1401.989372] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.996587] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396085, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.058311] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: f1001633-e4e5-4de1-8a6b-cf653e43d821] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1402.331834] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3a7e62fa-5484-497f-8296-e5cafd4a93cc tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Lock "1ec56ce5-c580-4369-ac0a-59c0782ac570" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.183s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.499302] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396085, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.183801} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.499555] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1402.500368] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0e52cf-65e6-4d5f-9b3b-b0b60c09959e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.522073] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] 30d6cc11-0258-47aa-b083-7c103c91acf2/30d6cc11-0258-47aa-b083-7c103c91acf2.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1402.522861] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f5198a09-e24c-48c7-bb4e-bda068966462 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.542229] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1402.542229] env[61978]: value = "task-1396086" [ 1402.542229] env[61978]: _type = "Task" [ 1402.542229] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.549788] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396086, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.561412] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: bdfdd685-e440-4f53-b6c4-2ee2f06acba8] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1402.726370] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Acquiring lock "1ec56ce5-c580-4369-ac0a-59c0782ac570" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1402.726751] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Lock "1ec56ce5-c580-4369-ac0a-59c0782ac570" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1402.726987] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Acquiring lock "1ec56ce5-c580-4369-ac0a-59c0782ac570-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1402.727204] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Lock "1ec56ce5-c580-4369-ac0a-59c0782ac570-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1402.727387] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Lock "1ec56ce5-c580-4369-ac0a-59c0782ac570-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.730479] env[61978]: INFO nova.compute.manager [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Terminating instance [ 1402.732551] env[61978]: DEBUG nova.compute.manager [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1402.733138] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1402.734211] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d14b5c38-bc90-4f75-97da-e9397cd51726 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.742869] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1402.743151] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f1ebfa9-41ff-470f-b359-5a704244fad5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.751293] env[61978]: DEBUG oslo_vmware.api [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Waiting for the task: (returnval){ [ 1402.751293] env[61978]: value = "task-1396087" [ 1402.751293] env[61978]: _type = "Task" [ 1402.751293] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.760487] env[61978]: DEBUG oslo_vmware.api [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Task: {'id': task-1396087, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.052690] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396086, 'name': ReconfigVM_Task, 'duration_secs': 0.299197} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.053032] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Reconfigured VM instance instance-0000006f to attach disk [datastore2] 30d6cc11-0258-47aa-b083-7c103c91acf2/30d6cc11-0258-47aa-b083-7c103c91acf2.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1403.053681] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ee07533-d158-4d36-bc73-bd7d9834e13d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.060421] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1403.060421] env[61978]: value = "task-1396088" [ 1403.060421] env[61978]: _type = "Task" [ 1403.060421] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.065064] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 8ffb8bea-9bb4-4b82-9716-7be99eb8c4d7] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1403.070460] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396088, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.262681] env[61978]: DEBUG oslo_vmware.api [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Task: {'id': task-1396087, 'name': PowerOffVM_Task, 'duration_secs': 0.348641} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.263055] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1403.263321] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1403.263683] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-af00007a-5b72-4613-86ef-305499c20a98 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.342831] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1403.343313] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1403.343313] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Deleting the datastore file [datastore2] 1ec56ce5-c580-4369-ac0a-59c0782ac570 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1403.343587] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e4598c99-c620-404a-af10-c762c8b8bd0d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.350425] env[61978]: DEBUG oslo_vmware.api [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Waiting for the task: (returnval){ [ 1403.350425] env[61978]: value = "task-1396090" [ 1403.350425] env[61978]: _type = "Task" [ 1403.350425] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.360350] env[61978]: DEBUG oslo_vmware.api [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Task: {'id': task-1396090, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.570853] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396088, 'name': Rename_Task, 'duration_secs': 0.142579} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.571296] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 9bee3e66-93b5-4c0f-bb46-8fbd78c312c0] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1403.573086] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1403.573526] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd1eb24f-d716-48cc-8684-5d39836023dd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.579611] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1403.579611] env[61978]: value = "task-1396091" [ 1403.579611] env[61978]: _type = "Task" [ 1403.579611] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.589601] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396091, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.864264] env[61978]: DEBUG oslo_vmware.api [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Task: {'id': task-1396090, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15849} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.864596] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1403.864794] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1403.864974] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1403.865313] env[61978]: INFO nova.compute.manager [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1403.865444] env[61978]: DEBUG oslo.service.loopingcall [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1403.865648] env[61978]: DEBUG nova.compute.manager [-] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1403.865744] env[61978]: DEBUG nova.network.neutron [-] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1404.074192] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: f3c837fb-be7e-40a6-aae4-7f213c62ab2c] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1404.091699] env[61978]: DEBUG oslo_vmware.api [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396091, 'name': PowerOnVM_Task, 'duration_secs': 0.446533} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.092077] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1404.092366] env[61978]: INFO nova.compute.manager [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Took 7.20 seconds to spawn the instance on the hypervisor. [ 1404.092653] env[61978]: DEBUG nova.compute.manager [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1404.093468] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cab075b-84a5-4e77-a379-a21309271327 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.348906] env[61978]: DEBUG nova.compute.manager [req-2c7ba365-2e7c-4130-90fd-f411d1c1ea1c req-6001a203-7032-47f0-97fc-7ba489ff73dc service nova] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Received event network-vif-deleted-9a17bc0d-88b6-4fce-95ef-62a0eb499feb {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1404.348906] env[61978]: INFO nova.compute.manager [req-2c7ba365-2e7c-4130-90fd-f411d1c1ea1c req-6001a203-7032-47f0-97fc-7ba489ff73dc service nova] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Neutron deleted interface 9a17bc0d-88b6-4fce-95ef-62a0eb499feb; detaching it from the instance and deleting it from the info cache [ 1404.349316] env[61978]: DEBUG nova.network.neutron [req-2c7ba365-2e7c-4130-90fd-f411d1c1ea1c req-6001a203-7032-47f0-97fc-7ba489ff73dc service nova] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.579612] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: c17c986e-c008-4414-8dd1-4ea836458048] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1404.615920] env[61978]: INFO nova.compute.manager [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Took 11.92 seconds to build instance. 
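[editor's note] The records above for instance 30d6cc11-0258-47aa-b083-7c103c91acf2 all follow the same shape: the driver invokes a vCenter operation (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), gets back a Task handle such as "task-1396083", and oslo_vmware's wait_for_task/_poll_task loop reports "progress is N%" until the task "completed successfully" with a duration_secs. The sketch below is a deliberately simplified, hypothetical version of that polling loop, not the oslo.vmware implementation; get_task_info() stands in for the real PropertyCollector read of the task's info property.

    # Minimal sketch of the task-polling pattern behind the
    # "progress is N%" / "completed successfully" lines above.
    import time


    class TaskFailed(Exception):
        """Raised when the vCenter task ends in an error state."""


    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        """Poll a vCenter task reference until it succeeds or fails.

        get_task_info: callable returning an object with .state,
                       .progress and .error for the task (hypothetical).
        task_ref:      opaque task handle, e.g. "task-1396083".
        """
        start = time.monotonic()
        while True:
            info = get_task_info(task_ref)
            if info.state == "success":
                duration = time.monotonic() - start
                print(f"Task {task_ref} completed successfully "
                      f"({duration:.6f}s)")
                return info
            if info.state == "error":
                raise TaskFailed(f"Task {task_ref} failed: {info.error}")
            # 'queued' or 'running': report progress and poll again,
            # mirroring the _poll_task debug lines in the log.
            print(f"Task {task_ref} progress is {info.progress or 0}%")
            time.sleep(poll_interval)
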
[ 1404.825960] env[61978]: DEBUG nova.network.neutron [-] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.855866] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d49f5cb2-84c7-4c46-adb6-84834dcd47e0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.862952] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a006f3ed-128e-449c-b671-5139bf597803 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.888310] env[61978]: DEBUG nova.compute.manager [req-2c7ba365-2e7c-4130-90fd-f411d1c1ea1c req-6001a203-7032-47f0-97fc-7ba489ff73dc service nova] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Detach interface failed, port_id=9a17bc0d-88b6-4fce-95ef-62a0eb499feb, reason: Instance 1ec56ce5-c580-4369-ac0a-59c0782ac570 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1405.083897] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 96bef3f3-a45c-43ba-a86a-66c1d5686ea6] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1405.117904] env[61978]: DEBUG oslo_concurrency.lockutils [None req-673f7e37-760a-4b95-a6a4-ef1c2573a41d tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "30d6cc11-0258-47aa-b083-7c103c91acf2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.428s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1405.188697] env[61978]: DEBUG nova.compute.manager [req-d6373536-66f3-4fdd-96a8-44b2c717d85f req-00c79f6a-5f62-4ceb-beae-912e1cc2f0fb service nova] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Received event network-changed-e1688d68-33f0-48a2-8d22-475b9f9bacfb {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1405.188971] env[61978]: DEBUG nova.compute.manager [req-d6373536-66f3-4fdd-96a8-44b2c717d85f req-00c79f6a-5f62-4ceb-beae-912e1cc2f0fb service nova] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Refreshing instance network info cache due to event network-changed-e1688d68-33f0-48a2-8d22-475b9f9bacfb. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1405.189209] env[61978]: DEBUG oslo_concurrency.lockutils [req-d6373536-66f3-4fdd-96a8-44b2c717d85f req-00c79f6a-5f62-4ceb-beae-912e1cc2f0fb service nova] Acquiring lock "refresh_cache-30d6cc11-0258-47aa-b083-7c103c91acf2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1405.189327] env[61978]: DEBUG oslo_concurrency.lockutils [req-d6373536-66f3-4fdd-96a8-44b2c717d85f req-00c79f6a-5f62-4ceb-beae-912e1cc2f0fb service nova] Acquired lock "refresh_cache-30d6cc11-0258-47aa-b083-7c103c91acf2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.189502] env[61978]: DEBUG nova.network.neutron [req-d6373536-66f3-4fdd-96a8-44b2c717d85f req-00c79f6a-5f62-4ceb-beae-912e1cc2f0fb service nova] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Refreshing network info cache for port e1688d68-33f0-48a2-8d22-475b9f9bacfb {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1405.326477] env[61978]: INFO nova.compute.manager [-] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Took 1.46 seconds to deallocate network for instance. [ 1405.587547] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 243e7146-46fc-43f4-a83b-cdc58f397f9e] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1405.833159] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.833442] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1405.833661] env[61978]: DEBUG nova.objects.instance [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Lazy-loading 'resources' on Instance uuid 1ec56ce5-c580-4369-ac0a-59c0782ac570 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1405.965045] env[61978]: DEBUG nova.network.neutron [req-d6373536-66f3-4fdd-96a8-44b2c717d85f req-00c79f6a-5f62-4ceb-beae-912e1cc2f0fb service nova] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Updated VIF entry in instance network info cache for port e1688d68-33f0-48a2-8d22-475b9f9bacfb. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1405.965821] env[61978]: DEBUG nova.network.neutron [req-d6373536-66f3-4fdd-96a8-44b2c717d85f req-00c79f6a-5f62-4ceb-beae-912e1cc2f0fb service nova] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Updating instance_info_cache with network_info: [{"id": "e1688d68-33f0-48a2-8d22-475b9f9bacfb", "address": "fa:16:3e:dd:fe:12", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1688d68-33", "ovs_interfaceid": "e1688d68-33f0-48a2-8d22-475b9f9bacfb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.054834] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "6a454083-8d85-4a29-98dc-29eb0a072560" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1406.055240] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "6a454083-8d85-4a29-98dc-29eb0a072560" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1406.091424] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 92eb5edb-803b-48d4-8c4f-338d7c3b3d13] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1406.395532] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1229469a-4396-4b28-9049-8e68ef3b2984 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.403668] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28da34f-1b2c-423d-839e-09971e3a353d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.434035] env[61978]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac9c7ee0-ffd0-4692-b69d-92a40ab0db54 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.441597] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61e35e5-3662-4b3e-9b38-27ae61418218 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.454697] env[61978]: DEBUG nova.compute.provider_tree [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1406.470169] env[61978]: DEBUG oslo_concurrency.lockutils [req-d6373536-66f3-4fdd-96a8-44b2c717d85f req-00c79f6a-5f62-4ceb-beae-912e1cc2f0fb service nova] Releasing lock "refresh_cache-30d6cc11-0258-47aa-b083-7c103c91acf2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1406.557776] env[61978]: DEBUG nova.compute.manager [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1406.594358] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: b26a4784-698d-477a-8db7-58156899d231] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1406.975647] env[61978]: ERROR nova.scheduler.client.report [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [req-89fb7572-7ad7-4c93-bd53-92cb83f91d85] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 44209228-3464-48ae-bc40-83eccd44b0cf. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-89fb7572-7ad7-4c93-bd53-92cb83f91d85"}]} [ 1406.990652] env[61978]: DEBUG nova.scheduler.client.report [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Refreshing inventories for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1407.001630] env[61978]: DEBUG nova.scheduler.client.report [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Updating ProviderTree inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1407.001795] env[61978]: DEBUG nova.compute.provider_tree [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1407.011212] env[61978]: DEBUG nova.scheduler.client.report [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Refreshing aggregate associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, aggregates: None {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1407.025371] env[61978]: DEBUG nova.scheduler.client.report [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Refreshing trait associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1407.067066] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b2befa-1d33-4147-aa23-60959131f3d0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.074138] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 
tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1407.075112] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0470c30-7cb4-46f9-8f61-8c2538aaf474 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.104048] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1407.105265] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714a2491-8510-45ae-a7d1-312c5143a3ca {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.113133] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ae714fa-9df4-4654-9710-a07abeadd6e8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.127342] env[61978]: DEBUG nova.compute.provider_tree [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1407.530323] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Acquiring lock "86f776d7-1ace-4e3c-8fa9-1562b97c832c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1407.530323] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Lock "86f776d7-1ace-4e3c-8fa9-1562b97c832c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1407.648478] env[61978]: ERROR nova.scheduler.client.report [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] [req-694e0b6c-24ec-47f8-85b4-e4f72cb78dd8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 44209228-3464-48ae-bc40-83eccd44b0cf. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-694e0b6c-24ec-47f8-85b4-e4f72cb78dd8"}]} [ 1407.665191] env[61978]: DEBUG nova.scheduler.client.report [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Refreshing inventories for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1407.680294] env[61978]: DEBUG nova.scheduler.client.report [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Updating ProviderTree inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1407.680538] env[61978]: DEBUG nova.compute.provider_tree [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1407.692084] env[61978]: DEBUG nova.scheduler.client.report [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Refreshing aggregate associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, aggregates: None {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1407.712430] env[61978]: DEBUG nova.scheduler.client.report [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Refreshing trait associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1407.771847] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec50a8b-146f-4889-bb6d-4ea00f305f64 {{(pid=61978) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.778919] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a91544-3278-4892-b7ac-10a06cb238fe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.807934] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6143a3a2-1573-4956-9d21-32d6e5e8d068 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.814645] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1359a499-7e1a-46e8-b6aa-a5698fc3cc3a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.827166] env[61978]: DEBUG nova.compute.provider_tree [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1408.033331] env[61978]: DEBUG nova.compute.manager [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1408.364749] env[61978]: DEBUG nova.scheduler.client.report [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Updated inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf with generation 164 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1408.364749] env[61978]: DEBUG nova.compute.provider_tree [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Updating resource provider 44209228-3464-48ae-bc40-83eccd44b0cf generation from 164 to 165 during operation: update_inventory {{(pid=61978) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1408.364749] env[61978]: DEBUG nova.compute.provider_tree [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1408.551330] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1408.870042] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.036s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1408.872700] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.799s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1408.874281] env[61978]: INFO nova.compute.claims [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 
6a454083-8d85-4a29-98dc-29eb0a072560] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1408.889296] env[61978]: INFO nova.scheduler.client.report [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Deleted allocations for instance 1ec56ce5-c580-4369-ac0a-59c0782ac570 [ 1409.395353] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9081acd2-fe31-40fb-be80-35bf47030f46 tempest-ServerAddressesTestJSON-1658196044 tempest-ServerAddressesTestJSON-1658196044-project-member] Lock "1ec56ce5-c580-4369-ac0a-59c0782ac570" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.669s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1409.929997] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc263489-8dd9-411d-b80b-d35001aa4b76 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.937478] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df0780e-75b7-494e-b71b-3d0e7c91f946 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.967062] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-876e7a34-7731-44ea-a213-49349c740afd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.973565] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afbbacb0-49ef-427f-bd3a-f1254a25ab32 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.986078] env[61978]: DEBUG nova.compute.provider_tree [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1410.491340] env[61978]: DEBUG nova.scheduler.client.report [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1410.997124] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.124s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.997825] 
env[61978]: DEBUG nova.compute.manager [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1411.003183] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.452s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1411.006252] env[61978]: INFO nova.compute.claims [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1411.510844] env[61978]: DEBUG nova.compute.utils [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1411.514160] env[61978]: DEBUG nova.compute.manager [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1411.514270] env[61978]: DEBUG nova.network.neutron [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1411.564394] env[61978]: DEBUG nova.policy [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b0d311a20433495487d3926eb92ce91c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a8f40d19e7c74ade886c322a78583545', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1411.848895] env[61978]: DEBUG nova.network.neutron [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Successfully created port: 6c4911ae-3f5e-46c7-9538-7e1a9811252e {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1412.015799] env[61978]: DEBUG nova.compute.manager [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 
6a454083-8d85-4a29-98dc-29eb0a072560] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1412.079395] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e6080f7-8a92-4ac4-bc26-9baa1257f61b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.086962] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16fac485-e276-49dd-ae63-37cedcc53239 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.116409] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19eb2c09-7834-45c5-90a5-67febd3a383a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.123509] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420e055e-971f-45e1-8a6c-28e7744a0755 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.136741] env[61978]: DEBUG nova.compute.provider_tree [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1412.640097] env[61978]: DEBUG nova.scheduler.client.report [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1413.027388] env[61978]: DEBUG nova.compute.manager [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1413.054351] env[61978]: DEBUG nova.virt.hardware [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1413.054628] env[61978]: DEBUG nova.virt.hardware [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1413.054798] env[61978]: DEBUG nova.virt.hardware [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1413.055038] env[61978]: DEBUG nova.virt.hardware [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1413.055204] env[61978]: DEBUG nova.virt.hardware [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1413.055355] env[61978]: DEBUG nova.virt.hardware [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1413.055582] env[61978]: DEBUG nova.virt.hardware [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1413.055744] env[61978]: DEBUG nova.virt.hardware [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
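The nova.virt.hardware entries here (continuing just below) take the m1.nano flavor — vcpus=1, no hw:cpu_* extra specs, so limits and preferences are all 0:0:0 — and narrow it to a single VirtCPUTopology(cores=1,sockets=1,threads=1). A minimal, self-contained sketch of that factorization step, not Nova's actual implementation, with the 65536 defaults standing in for "unlimited" exactly as in the trace:

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product equals vcpus,
        i.e. the enumeration the _get_possible_cpu_topologies entries describe."""
        for sockets, cores, threads in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                yield sockets, cores, threads

    print(list(possible_topologies(1)))  # [(1, 1, 1)] -- the single topology the log reports for 1 vCPU
    print(list(possible_topologies(4)))  # (1, 1, 4), (1, 2, 2), (2, 2, 1), ... for comparison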
[ 1413.055918] env[61978]: DEBUG nova.virt.hardware [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1413.056610] env[61978]: DEBUG nova.virt.hardware [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1413.056944] env[61978]: DEBUG nova.virt.hardware [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1413.057701] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1997e1e-94e0-494f-a1be-45af284557da {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.066033] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56baefd5-23c1-4e74-9df1-0ecd2cf60b67 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.148254] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.142s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.148254] env[61978]: DEBUG nova.compute.manager [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1413.272734] env[61978]: DEBUG nova.compute.manager [req-02c671d1-3121-4914-89fb-586c06547300 req-9296f3ab-a400-44b7-b9ac-9bdea07a54b6 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Received event network-vif-plugged-6c4911ae-3f5e-46c7-9538-7e1a9811252e {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1413.272734] env[61978]: DEBUG oslo_concurrency.lockutils [req-02c671d1-3121-4914-89fb-586c06547300 req-9296f3ab-a400-44b7-b9ac-9bdea07a54b6 service nova] Acquiring lock "6a454083-8d85-4a29-98dc-29eb0a072560-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.273698] env[61978]: DEBUG oslo_concurrency.lockutils [req-02c671d1-3121-4914-89fb-586c06547300 req-9296f3ab-a400-44b7-b9ac-9bdea07a54b6 service nova] Lock "6a454083-8d85-4a29-98dc-29eb0a072560-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.274132] env[61978]: DEBUG oslo_concurrency.lockutils [req-02c671d1-3121-4914-89fb-586c06547300 req-9296f3ab-a400-44b7-b9ac-9bdea07a54b6 service nova] Lock "6a454083-8d85-4a29-98dc-29eb0a072560-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.274770] env[61978]: DEBUG nova.compute.manager [req-02c671d1-3121-4914-89fb-586c06547300 req-9296f3ab-a400-44b7-b9ac-9bdea07a54b6 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] No waiting events found dispatching network-vif-plugged-6c4911ae-3f5e-46c7-9538-7e1a9811252e {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1413.275075] env[61978]: WARNING nova.compute.manager [req-02c671d1-3121-4914-89fb-586c06547300 req-9296f3ab-a400-44b7-b9ac-9bdea07a54b6 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Received unexpected event network-vif-plugged-6c4911ae-3f5e-46c7-9538-7e1a9811252e for instance with vm_state building and task_state spawning. [ 1413.369445] env[61978]: DEBUG nova.network.neutron [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Successfully updated port: 6c4911ae-3f5e-46c7-9538-7e1a9811252e {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1413.651749] env[61978]: DEBUG nova.compute.utils [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1413.653488] env[61978]: DEBUG nova.compute.manager [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Not allocating networking since 'none' was specified. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1413.870129] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1413.870129] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquired lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.870129] env[61978]: DEBUG nova.network.neutron [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1414.155246] env[61978]: DEBUG nova.compute.manager [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1414.406336] env[61978]: DEBUG nova.network.neutron [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1414.550033] env[61978]: DEBUG nova.network.neutron [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Updating instance_info_cache with network_info: [{"id": "6c4911ae-3f5e-46c7-9538-7e1a9811252e", "address": "fa:16:3e:c3:c6:fc", "network": {"id": "1200e62b-3b3a-40e1-98a2-c33a17573a38", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2039134840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8f40d19e7c74ade886c322a78583545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c4911ae-3f", "ovs_interfaceid": "6c4911ae-3f5e-46c7-9538-7e1a9811252e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1415.053931] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Releasing lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1415.054288] env[61978]: DEBUG nova.compute.manager [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Instance network_info: |[{"id": "6c4911ae-3f5e-46c7-9538-7e1a9811252e", "address": "fa:16:3e:c3:c6:fc", "network": {"id": "1200e62b-3b3a-40e1-98a2-c33a17573a38", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2039134840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8f40d19e7c74ade886c322a78583545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c4911ae-3f", "ovs_interfaceid": "6c4911ae-3f5e-46c7-9538-7e1a9811252e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1415.054739] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:c6:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c4911ae-3f5e-46c7-9538-7e1a9811252e', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1415.062045] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Creating folder: Project (a8f40d19e7c74ade886c322a78583545). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1415.062716] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1b57d33d-0339-458d-9db0-4e4ab164aedc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.074296] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Created folder: Project (a8f40d19e7c74ade886c322a78583545) in parent group-v295764. [ 1415.074492] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Creating folder: Instances. Parent ref: group-v296061. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1415.074720] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-25b1708e-da58-41af-b709-002136effcb2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.083498] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Created folder: Instances in parent group-v296061. [ 1415.083729] env[61978]: DEBUG oslo.service.loopingcall [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1415.083915] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1415.084127] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cc5eee19-8aaf-4774-ba8a-ca15ab00bc20 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.102895] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1415.102895] env[61978]: value = "task-1396094" [ 1415.102895] env[61978]: _type = "Task" [ 1415.102895] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.112795] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396094, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.167025] env[61978]: DEBUG nova.compute.manager [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1415.197062] env[61978]: DEBUG nova.virt.hardware [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1415.197062] env[61978]: DEBUG nova.virt.hardware [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1415.197062] env[61978]: DEBUG nova.virt.hardware [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1415.197062] env[61978]: DEBUG nova.virt.hardware [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1415.197771] env[61978]: DEBUG nova.virt.hardware [None 
req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1415.198268] env[61978]: DEBUG nova.virt.hardware [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1415.201023] env[61978]: DEBUG nova.virt.hardware [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1415.201023] env[61978]: DEBUG nova.virt.hardware [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1415.201023] env[61978]: DEBUG nova.virt.hardware [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1415.201023] env[61978]: DEBUG nova.virt.hardware [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1415.201023] env[61978]: DEBUG nova.virt.hardware [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1415.201023] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f555eb1c-107b-4ba3-b869-bbee2aae4ebf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.209990] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "a6f73332-d0a5-4c52-8e38-8982e42ee62f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1415.210409] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a6f73332-d0a5-4c52-8e38-8982e42ee62f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1415.215561] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe5daa35-0ea8-4612-806e-dfd96b1e4513 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.230801] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Instance VIF info [] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1415.236373] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Creating folder: Project (3b877d5d10024215aa0bff14cbc68e30). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1415.237048] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be97c0d0-eb61-445c-8427-d3ae781eb1a9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.247620] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Created folder: Project (3b877d5d10024215aa0bff14cbc68e30) in parent group-v295764. [ 1415.248207] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Creating folder: Instances. Parent ref: group-v296064. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1415.248574] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a5e6c4d-e0b5-4ea7-833f-ab9a249114ac {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.259022] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Created folder: Instances in parent group-v296064. [ 1415.259022] env[61978]: DEBUG oslo.service.loopingcall [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1415.259022] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1415.259022] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c428308b-33ac-464d-8334-5457d8ab8131 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.275267] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1415.275267] env[61978]: value = "task-1396097" [ 1415.275267] env[61978]: _type = "Task" [ 1415.275267] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.283889] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396097, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.304760] env[61978]: DEBUG nova.compute.manager [req-69bf9b80-2357-4925-8eec-5096aa1fe402 req-d70a2c3f-82f6-4a9d-9170-de03c85974a6 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Received event network-changed-6c4911ae-3f5e-46c7-9538-7e1a9811252e {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1415.305197] env[61978]: DEBUG nova.compute.manager [req-69bf9b80-2357-4925-8eec-5096aa1fe402 req-d70a2c3f-82f6-4a9d-9170-de03c85974a6 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Refreshing instance network info cache due to event network-changed-6c4911ae-3f5e-46c7-9538-7e1a9811252e. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1415.305625] env[61978]: DEBUG oslo_concurrency.lockutils [req-69bf9b80-2357-4925-8eec-5096aa1fe402 req-d70a2c3f-82f6-4a9d-9170-de03c85974a6 service nova] Acquiring lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1415.306203] env[61978]: DEBUG oslo_concurrency.lockutils [req-69bf9b80-2357-4925-8eec-5096aa1fe402 req-d70a2c3f-82f6-4a9d-9170-de03c85974a6 service nova] Acquired lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.307965] env[61978]: DEBUG nova.network.neutron [req-69bf9b80-2357-4925-8eec-5096aa1fe402 req-d70a2c3f-82f6-4a9d-9170-de03c85974a6 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Refreshing network info cache for port 6c4911ae-3f5e-46c7-9538-7e1a9811252e {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1415.612969] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396094, 'name': CreateVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.715725] env[61978]: DEBUG nova.compute.manager [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1415.785143] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396097, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.987773] env[61978]: DEBUG nova.network.neutron [req-69bf9b80-2357-4925-8eec-5096aa1fe402 req-d70a2c3f-82f6-4a9d-9170-de03c85974a6 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Updated VIF entry in instance network info cache for port 6c4911ae-3f5e-46c7-9538-7e1a9811252e. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1415.988182] env[61978]: DEBUG nova.network.neutron [req-69bf9b80-2357-4925-8eec-5096aa1fe402 req-d70a2c3f-82f6-4a9d-9170-de03c85974a6 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Updating instance_info_cache with network_info: [{"id": "6c4911ae-3f5e-46c7-9538-7e1a9811252e", "address": "fa:16:3e:c3:c6:fc", "network": {"id": "1200e62b-3b3a-40e1-98a2-c33a17573a38", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2039134840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8f40d19e7c74ade886c322a78583545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c4911ae-3f", "ovs_interfaceid": "6c4911ae-3f5e-46c7-9538-7e1a9811252e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1416.112385] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396094, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.239252] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1416.239539] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1416.241161] env[61978]: INFO nova.compute.claims [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1416.288486] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396097, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.491367] env[61978]: DEBUG oslo_concurrency.lockutils [req-69bf9b80-2357-4925-8eec-5096aa1fe402 req-d70a2c3f-82f6-4a9d-9170-de03c85974a6 service nova] Releasing lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1416.613731] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396094, 'name': CreateVM_Task, 'duration_secs': 1.285759} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.613972] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1416.614685] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1416.614872] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.615223] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1416.615483] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1b88a14-eab5-4ae3-9dec-6a5d3d1f8755 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.619791] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1416.619791] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]523c7a83-eeb1-2b65-f839-b63418dd245b" [ 1416.619791] env[61978]: _type = "Task" [ 1416.619791] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.626826] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523c7a83-eeb1-2b65-f839-b63418dd245b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.786558] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396097, 'name': CreateVM_Task, 'duration_secs': 1.234824} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.786726] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1416.787245] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.129818] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523c7a83-eeb1-2b65-f839-b63418dd245b, 'name': SearchDatastore_Task, 'duration_secs': 0.00838} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.130190] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1417.130372] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1417.130604] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.130758] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.130945] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1417.131235] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.131537] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1417.131757] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ebfd9ff1-2248-4163-8941-798ef7b65dbc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.133410] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c6ccfee-d3a2-4002-94a2-61283d0343d1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.137728] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Waiting for the task: (returnval){ [ 1417.137728] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5284618d-1b37-095f-e58a-751924b80e21" [ 1417.137728] env[61978]: _type = "Task" [ 1417.137728] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.141971] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1417.142169] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1417.143079] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1960ed72-8d9f-4520-b366-3752b571d6cf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.147684] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5284618d-1b37-095f-e58a-751924b80e21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.150329] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1417.150329] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]523df06d-9ca4-9255-afc6-d7d65627697d" [ 1417.150329] env[61978]: _type = "Task" [ 1417.150329] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.157159] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523df06d-9ca4-9255-afc6-d7d65627697d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.304451] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0acea23-979d-4d7d-867b-a5b072c3b78e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.312118] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3fa9158-5af9-4f85-89c0-188dc665d844 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.340438] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53bb0fde-9ba3-492a-878d-21cadaf17abf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.346940] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5ac5d9-7415-4e98-aee1-ea9a35a4cae4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.361016] env[61978]: DEBUG nova.compute.provider_tree [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1417.649234] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5284618d-1b37-095f-e58a-751924b80e21, 'name': SearchDatastore_Task, 'duration_secs': 0.01574} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.649585] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1417.649864] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1417.650150] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.659302] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523df06d-9ca4-9255-afc6-d7d65627697d, 'name': SearchDatastore_Task, 'duration_secs': 0.007479} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.660012] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae7b6ad6-57ac-48aa-94e9-ec3bf588ee27 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.664498] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1417.664498] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5250d2cd-2ddc-0021-3218-bccb95a3486b" [ 1417.664498] env[61978]: _type = "Task" [ 1417.664498] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.671442] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5250d2cd-2ddc-0021-3218-bccb95a3486b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.864084] env[61978]: DEBUG nova.scheduler.client.report [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1418.175079] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5250d2cd-2ddc-0021-3218-bccb95a3486b, 'name': SearchDatastore_Task, 'duration_secs': 0.008483} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.175426] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1418.175612] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 6a454083-8d85-4a29-98dc-29eb0a072560/6a454083-8d85-4a29-98dc-29eb0a072560.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1418.175946] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.176161] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1418.176383] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45400e0b-e77f-484f-bbd8-3348db5c952a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.178358] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fcdb4c6a-81e8-412d-b444-8cfe731618e7 {{(pid=61978) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.184093] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1418.184093] env[61978]: value = "task-1396098" [ 1418.184093] env[61978]: _type = "Task" [ 1418.184093] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.187537] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1418.187748] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1418.188680] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd882278-6c95-45d7-a510-46e963457038 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.193416] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396098, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.196147] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Waiting for the task: (returnval){ [ 1418.196147] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c1622a-a334-f90a-4eef-49e799b28552" [ 1418.196147] env[61978]: _type = "Task" [ 1418.196147] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.202875] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c1622a-a334-f90a-4eef-49e799b28552, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.369386] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.130s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.369960] env[61978]: DEBUG nova.compute.manager [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1418.693459] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396098, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464809} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.693728] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 6a454083-8d85-4a29-98dc-29eb0a072560/6a454083-8d85-4a29-98dc-29eb0a072560.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1418.693997] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1418.694301] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0c3cc141-2189-4172-ae02-21aff6793df6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.701147] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1418.701147] env[61978]: value = "task-1396099" [ 1418.701147] env[61978]: _type = "Task" [ 1418.701147] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.707658] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52c1622a-a334-f90a-4eef-49e799b28552, 'name': SearchDatastore_Task, 'duration_secs': 0.009216} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.708876] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fae9e2b-f012-429e-ae73-ae507b66dcfe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.714580] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396099, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.715853] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Waiting for the task: (returnval){ [ 1418.715853] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]524178cb-81be-dd45-4abe-547ffe87b671" [ 1418.715853] env[61978]: _type = "Task" [ 1418.715853] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.722679] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]524178cb-81be-dd45-4abe-547ffe87b671, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.875706] env[61978]: DEBUG nova.compute.utils [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1418.877317] env[61978]: DEBUG nova.compute.manager [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1418.877475] env[61978]: DEBUG nova.network.neutron [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1418.919187] env[61978]: DEBUG nova.policy [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd72a836e3aef4b59b1092b91f33fd929', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2b289cdad1fe4ad38c5d987680be2367', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1419.181201] env[61978]: DEBUG nova.network.neutron [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Successfully created port: 204d4c58-f413-4204-b406-205812a3832d {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1419.212380] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396099, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065097} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.212670] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1419.213523] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4f775f8-7401-4b18-a6bf-13e1919e25ee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.237101] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 6a454083-8d85-4a29-98dc-29eb0a072560/6a454083-8d85-4a29-98dc-29eb0a072560.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1419.237747] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-814e4588-80a0-412d-9f2a-1770ce4cd5e7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.257950] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]524178cb-81be-dd45-4abe-547ffe87b671, 'name': SearchDatastore_Task, 'duration_secs': 0.009015} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.258570] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.258900] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 86f776d7-1ace-4e3c-8fa9-1562b97c832c/86f776d7-1ace-4e3c-8fa9-1562b97c832c.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1419.259167] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-11d74dbc-72ea-44d4-9f2d-8b2c0e2482aa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.262946] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1419.262946] env[61978]: value = "task-1396100" [ 1419.262946] env[61978]: _type = "Task" [ 1419.262946] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.266724] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Waiting for the task: (returnval){ [ 1419.266724] env[61978]: value = "task-1396101" [ 1419.266724] env[61978]: _type = "Task" [ 1419.266724] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.272514] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396100, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.277315] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Task: {'id': task-1396101, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.380552] env[61978]: DEBUG nova.compute.manager [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1419.776138] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396100, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.779316] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Task: {'id': task-1396101, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.479908} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.779558] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 86f776d7-1ace-4e3c-8fa9-1562b97c832c/86f776d7-1ace-4e3c-8fa9-1562b97c832c.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1419.779777] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1419.780037] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-392712ce-5f26-472d-a2b8-ff6bc874393e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.785950] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Waiting for the task: (returnval){ [ 1419.785950] env[61978]: value = "task-1396102" [ 1419.785950] env[61978]: _type = "Task" [ 1419.785950] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.792861] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Task: {'id': task-1396102, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.273685] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396100, 'name': ReconfigVM_Task, 'duration_secs': 0.544312} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.274071] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 6a454083-8d85-4a29-98dc-29eb0a072560/6a454083-8d85-4a29-98dc-29eb0a072560.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1420.274643] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3140433b-a819-4b78-8d48-609f258fab44 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.280693] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1420.280693] env[61978]: value = "task-1396103" [ 1420.280693] env[61978]: _type = "Task" [ 1420.280693] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.291242] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396103, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.299129] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Task: {'id': task-1396102, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073276} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.299430] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1420.300228] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b0083e8-285d-49f3-8392-9d0f959a5f16 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.321166] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] 86f776d7-1ace-4e3c-8fa9-1562b97c832c/86f776d7-1ace-4e3c-8fa9-1562b97c832c.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1420.321602] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55f5eaf5-8803-4e04-93ce-290bdd0ce7b5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.340690] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Waiting for the task: (returnval){ [ 1420.340690] env[61978]: value = "task-1396104" [ 1420.340690] env[61978]: _type = "Task" [ 1420.340690] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.348316] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Task: {'id': task-1396104, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.391817] env[61978]: DEBUG nova.compute.manager [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1420.419965] env[61978]: DEBUG nova.virt.hardware [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1420.420344] env[61978]: DEBUG nova.virt.hardware [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1420.420512] env[61978]: DEBUG nova.virt.hardware [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1420.420699] env[61978]: DEBUG nova.virt.hardware [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1420.420854] env[61978]: DEBUG nova.virt.hardware [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1420.421016] env[61978]: DEBUG nova.virt.hardware [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1420.421239] env[61978]: DEBUG nova.virt.hardware [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1420.421402] env[61978]: DEBUG nova.virt.hardware [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1420.421576] 
env[61978]: DEBUG nova.virt.hardware [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1420.421744] env[61978]: DEBUG nova.virt.hardware [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1420.421928] env[61978]: DEBUG nova.virt.hardware [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1420.422891] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26fed88f-5e86-479d-b9ce-74c030994d0e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.430848] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41565a6-3275-47e4-aa49-05c837a51691 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.599769] env[61978]: DEBUG nova.compute.manager [req-dc14d751-36ee-402b-b865-f434665d1ed4 req-d46e1b9b-8272-471b-ad30-5a135b073097 service nova] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Received event network-vif-plugged-204d4c58-f413-4204-b406-205812a3832d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1420.599855] env[61978]: DEBUG oslo_concurrency.lockutils [req-dc14d751-36ee-402b-b865-f434665d1ed4 req-d46e1b9b-8272-471b-ad30-5a135b073097 service nova] Acquiring lock "a6f73332-d0a5-4c52-8e38-8982e42ee62f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1420.600088] env[61978]: DEBUG oslo_concurrency.lockutils [req-dc14d751-36ee-402b-b865-f434665d1ed4 req-d46e1b9b-8272-471b-ad30-5a135b073097 service nova] Lock "a6f73332-d0a5-4c52-8e38-8982e42ee62f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1420.600357] env[61978]: DEBUG oslo_concurrency.lockutils [req-dc14d751-36ee-402b-b865-f434665d1ed4 req-d46e1b9b-8272-471b-ad30-5a135b073097 service nova] Lock "a6f73332-d0a5-4c52-8e38-8982e42ee62f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1420.600549] env[61978]: DEBUG nova.compute.manager [req-dc14d751-36ee-402b-b865-f434665d1ed4 req-d46e1b9b-8272-471b-ad30-5a135b073097 service nova] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] No waiting events found dispatching network-vif-plugged-204d4c58-f413-4204-b406-205812a3832d {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1420.600743] env[61978]: WARNING 
nova.compute.manager [req-dc14d751-36ee-402b-b865-f434665d1ed4 req-d46e1b9b-8272-471b-ad30-5a135b073097 service nova] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Received unexpected event network-vif-plugged-204d4c58-f413-4204-b406-205812a3832d for instance with vm_state building and task_state spawning. [ 1420.685366] env[61978]: DEBUG nova.network.neutron [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Successfully updated port: 204d4c58-f413-4204-b406-205812a3832d {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1420.790973] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396103, 'name': Rename_Task, 'duration_secs': 0.446329} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.791280] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1420.791536] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de3339af-a094-4564-9335-3c71e77aaf2f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.797453] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1420.797453] env[61978]: value = "task-1396105" [ 1420.797453] env[61978]: _type = "Task" [ 1420.797453] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.804819] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396105, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.849415] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Task: {'id': task-1396104, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.186640] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "refresh_cache-a6f73332-d0a5-4c52-8e38-8982e42ee62f" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1421.186800] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquired lock "refresh_cache-a6f73332-d0a5-4c52-8e38-8982e42ee62f" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1421.186964] env[61978]: DEBUG nova.network.neutron [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1421.308161] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396105, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.353544] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Task: {'id': task-1396104, 'name': ReconfigVM_Task, 'duration_secs': 0.805406} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.353907] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Reconfigured VM instance instance-00000071 to attach disk [datastore2] 86f776d7-1ace-4e3c-8fa9-1562b97c832c/86f776d7-1ace-4e3c-8fa9-1562b97c832c.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1421.354773] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0df4e761-0829-4f87-a75c-41ee50c92d70 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.362693] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Waiting for the task: (returnval){ [ 1421.362693] env[61978]: value = "task-1396106" [ 1421.362693] env[61978]: _type = "Task" [ 1421.362693] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.370518] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Task: {'id': task-1396106, 'name': Rename_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.720851] env[61978]: DEBUG nova.network.neutron [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1421.809823] env[61978]: DEBUG oslo_vmware.api [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396105, 'name': PowerOnVM_Task, 'duration_secs': 0.581049} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.812042] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1421.812262] env[61978]: INFO nova.compute.manager [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Took 8.78 seconds to spawn the instance on the hypervisor. [ 1421.812454] env[61978]: DEBUG nova.compute.manager [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1421.813249] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c42049-1c34-4b09-9cf5-ace5dcb993dd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.847414] env[61978]: DEBUG nova.network.neutron [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Updating instance_info_cache with network_info: [{"id": "204d4c58-f413-4204-b406-205812a3832d", "address": "fa:16:3e:e1:38:54", "network": {"id": "b65f6c14-2588-4d96-a954-d45b7ede9d35", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-701006356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b289cdad1fe4ad38c5d987680be2367", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap204d4c58-f4", "ovs_interfaceid": "204d4c58-f413-4204-b406-205812a3832d", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.872475] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Task: {'id': task-1396106, 'name': Rename_Task, 'duration_secs': 0.132779} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.872773] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1421.873034] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4061e8a3-ad59-4d6d-b24c-248607c1c758 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.878522] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Waiting for the task: (returnval){ [ 1421.878522] env[61978]: value = "task-1396107" [ 1421.878522] env[61978]: _type = "Task" [ 1421.878522] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.885739] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Task: {'id': task-1396107, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.329850] env[61978]: INFO nova.compute.manager [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Took 15.27 seconds to build instance. 
[ 1422.352468] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Releasing lock "refresh_cache-a6f73332-d0a5-4c52-8e38-8982e42ee62f" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1422.352812] env[61978]: DEBUG nova.compute.manager [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Instance network_info: |[{"id": "204d4c58-f413-4204-b406-205812a3832d", "address": "fa:16:3e:e1:38:54", "network": {"id": "b65f6c14-2588-4d96-a954-d45b7ede9d35", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-701006356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b289cdad1fe4ad38c5d987680be2367", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap204d4c58-f4", "ovs_interfaceid": "204d4c58-f413-4204-b406-205812a3832d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1422.353817] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:38:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '67921bdb-a7a0-46b5-ba05-ca997496e222', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '204d4c58-f413-4204-b406-205812a3832d', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1422.361273] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Creating folder: Project (2b289cdad1fe4ad38c5d987680be2367). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1422.361713] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c38488cd-a695-4e6d-bb67-e0b10facbd01 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.372241] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Created folder: Project (2b289cdad1fe4ad38c5d987680be2367) in parent group-v295764. 
[ 1422.372432] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Creating folder: Instances. Parent ref: group-v296067. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1422.372655] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff1ca0f5-08e0-4921-9eab-b8628c0e203d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.383839] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Created folder: Instances in parent group-v296067. [ 1422.384092] env[61978]: DEBUG oslo.service.loopingcall [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1422.384585] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1422.384790] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fdd18b7f-661e-4efc-82df-7e47bccc62dc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.401494] env[61978]: DEBUG oslo_vmware.api [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Task: {'id': task-1396107, 'name': PowerOnVM_Task, 'duration_secs': 0.434599} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.402065] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1422.402309] env[61978]: INFO nova.compute.manager [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Took 7.24 seconds to spawn the instance on the hypervisor. 
[ 1422.402533] env[61978]: DEBUG nova.compute.manager [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1422.403269] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e97331c7-cc2a-425c-87c0-6cce9ae936b3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.406271] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1422.406271] env[61978]: value = "task-1396110" [ 1422.406271] env[61978]: _type = "Task" [ 1422.406271] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.417081] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396110, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.635360] env[61978]: DEBUG nova.compute.manager [req-31fa5457-ab68-48a9-9391-3940d516efd3 req-4f8ec9e8-0972-407c-8aaf-21d116f20c09 service nova] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Received event network-changed-204d4c58-f413-4204-b406-205812a3832d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1422.635360] env[61978]: DEBUG nova.compute.manager [req-31fa5457-ab68-48a9-9391-3940d516efd3 req-4f8ec9e8-0972-407c-8aaf-21d116f20c09 service nova] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Refreshing instance network info cache due to event network-changed-204d4c58-f413-4204-b406-205812a3832d. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1422.635360] env[61978]: DEBUG oslo_concurrency.lockutils [req-31fa5457-ab68-48a9-9391-3940d516efd3 req-4f8ec9e8-0972-407c-8aaf-21d116f20c09 service nova] Acquiring lock "refresh_cache-a6f73332-d0a5-4c52-8e38-8982e42ee62f" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1422.635360] env[61978]: DEBUG oslo_concurrency.lockutils [req-31fa5457-ab68-48a9-9391-3940d516efd3 req-4f8ec9e8-0972-407c-8aaf-21d116f20c09 service nova] Acquired lock "refresh_cache-a6f73332-d0a5-4c52-8e38-8982e42ee62f" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1422.635360] env[61978]: DEBUG nova.network.neutron [req-31fa5457-ab68-48a9-9391-3940d516efd3 req-4f8ec9e8-0972-407c-8aaf-21d116f20c09 service nova] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Refreshing network info cache for port 204d4c58-f413-4204-b406-205812a3832d {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1422.831983] env[61978]: DEBUG oslo_concurrency.lockutils [None req-a0583912-6b56-4d2d-a8f5-06d2df576313 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "6a454083-8d85-4a29-98dc-29eb0a072560" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.777s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1422.919490] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396110, 'name': CreateVM_Task, 'duration_secs': 0.315974} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.921098] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1422.921721] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1422.921899] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1422.922279] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1422.922506] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39c6270c-b44d-4bbc-992c-f06ef31dc5d2 {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.924348] env[61978]: INFO nova.compute.manager [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Took 14.39 seconds to build instance. [ 1422.928025] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1422.928025] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]526bd7b8-2d3f-d925-7928-1ee3e9271f33" [ 1422.928025] env[61978]: _type = "Task" [ 1422.928025] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.935801] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]526bd7b8-2d3f-d925-7928-1ee3e9271f33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.334789] env[61978]: DEBUG nova.network.neutron [req-31fa5457-ab68-48a9-9391-3940d516efd3 req-4f8ec9e8-0972-407c-8aaf-21d116f20c09 service nova] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Updated VIF entry in instance network info cache for port 204d4c58-f413-4204-b406-205812a3832d. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1423.335207] env[61978]: DEBUG nova.network.neutron [req-31fa5457-ab68-48a9-9391-3940d516efd3 req-4f8ec9e8-0972-407c-8aaf-21d116f20c09 service nova] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Updating instance_info_cache with network_info: [{"id": "204d4c58-f413-4204-b406-205812a3832d", "address": "fa:16:3e:e1:38:54", "network": {"id": "b65f6c14-2588-4d96-a954-d45b7ede9d35", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-701006356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b289cdad1fe4ad38c5d987680be2367", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap204d4c58-f4", "ovs_interfaceid": "204d4c58-f413-4204-b406-205812a3832d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1423.427237] env[61978]: DEBUG oslo_concurrency.lockutils [None req-25dd0ef7-f060-4f93-abf8-329199a32657 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Lock "86f776d7-1ace-4e3c-8fa9-1562b97c832c" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.897s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1423.439637] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]526bd7b8-2d3f-d925-7928-1ee3e9271f33, 'name': SearchDatastore_Task, 'duration_secs': 0.010112} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.439996] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1423.440258] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1423.440499] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1423.440656] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1423.440841] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1423.441115] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e3128d81-1f04-4491-833c-ebeccb28e4cf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.449109] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1423.449109] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 
tempest-AttachVolumeNegativeTest-1755418241-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1423.449734] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c56507e6-8e53-4ac8-ad98-a35bbf044d67 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.454413] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1423.454413] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5241453d-87b1-4dd8-cecf-6a51aef02d89" [ 1423.454413] env[61978]: _type = "Task" [ 1423.454413] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.463057] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5241453d-87b1-4dd8-cecf-6a51aef02d89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.838311] env[61978]: DEBUG oslo_concurrency.lockutils [req-31fa5457-ab68-48a9-9391-3940d516efd3 req-4f8ec9e8-0972-407c-8aaf-21d116f20c09 service nova] Releasing lock "refresh_cache-a6f73332-d0a5-4c52-8e38-8982e42ee62f" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1423.964934] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5241453d-87b1-4dd8-cecf-6a51aef02d89, 'name': SearchDatastore_Task, 'duration_secs': 0.007863} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.965751] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40945ee8-1c9e-4991-9241-2a3bfac156c0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.970751] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1423.970751] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5204ff05-6965-6781-483b-d7eba6ddbe1a" [ 1423.970751] env[61978]: _type = "Task" [ 1423.970751] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.978194] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5204ff05-6965-6781-483b-d7eba6ddbe1a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.986742] env[61978]: DEBUG nova.compute.manager [None req-a5eb7fc8-e9b1-4680-a644-4e96f357586c tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1423.987555] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c17ca6-31f5-4eb7-a9af-1809bdec0e66 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.053062] env[61978]: DEBUG oslo_concurrency.lockutils [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Acquiring lock "86f776d7-1ace-4e3c-8fa9-1562b97c832c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.053410] env[61978]: DEBUG oslo_concurrency.lockutils [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Lock "86f776d7-1ace-4e3c-8fa9-1562b97c832c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.053718] env[61978]: DEBUG oslo_concurrency.lockutils [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Acquiring lock "86f776d7-1ace-4e3c-8fa9-1562b97c832c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.054502] env[61978]: DEBUG oslo_concurrency.lockutils [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Lock "86f776d7-1ace-4e3c-8fa9-1562b97c832c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.054783] env[61978]: DEBUG oslo_concurrency.lockutils [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Lock "86f776d7-1ace-4e3c-8fa9-1562b97c832c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1424.057420] env[61978]: INFO nova.compute.manager [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Terminating instance [ 1424.059257] env[61978]: DEBUG oslo_concurrency.lockutils [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Acquiring lock 
"refresh_cache-86f776d7-1ace-4e3c-8fa9-1562b97c832c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.059433] env[61978]: DEBUG oslo_concurrency.lockutils [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Acquired lock "refresh_cache-86f776d7-1ace-4e3c-8fa9-1562b97c832c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.059608] env[61978]: DEBUG nova.network.neutron [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1424.482058] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5204ff05-6965-6781-483b-d7eba6ddbe1a, 'name': SearchDatastore_Task, 'duration_secs': 0.009056} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.482455] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1424.482638] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] a6f73332-d0a5-4c52-8e38-8982e42ee62f/a6f73332-d0a5-4c52-8e38-8982e42ee62f.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1424.482918] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-57e8adfc-93a7-41d4-9e0d-1414e69512cb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.490026] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1424.490026] env[61978]: value = "task-1396111" [ 1424.490026] env[61978]: _type = "Task" [ 1424.490026] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.497967] env[61978]: INFO nova.compute.manager [None req-a5eb7fc8-e9b1-4680-a644-4e96f357586c tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] instance snapshotting [ 1424.498523] env[61978]: DEBUG nova.objects.instance [None req-a5eb7fc8-e9b1-4680-a644-4e96f357586c tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Lazy-loading 'flavor' on Instance uuid 86f776d7-1ace-4e3c-8fa9-1562b97c832c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1424.499830] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396111, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.578020] env[61978]: DEBUG nova.network.neutron [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1424.660149] env[61978]: DEBUG nova.network.neutron [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1424.687954] env[61978]: DEBUG nova.compute.manager [req-797a0c90-0673-40c6-8227-a3d95e6b2ddd req-1e0c5745-84da-45e8-acc7-b25c574af9d0 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Received event network-changed-6c4911ae-3f5e-46c7-9538-7e1a9811252e {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1424.688256] env[61978]: DEBUG nova.compute.manager [req-797a0c90-0673-40c6-8227-a3d95e6b2ddd req-1e0c5745-84da-45e8-acc7-b25c574af9d0 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Refreshing instance network info cache due to event network-changed-6c4911ae-3f5e-46c7-9538-7e1a9811252e. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1424.688513] env[61978]: DEBUG oslo_concurrency.lockutils [req-797a0c90-0673-40c6-8227-a3d95e6b2ddd req-1e0c5745-84da-45e8-acc7-b25c574af9d0 service nova] Acquiring lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.688672] env[61978]: DEBUG oslo_concurrency.lockutils [req-797a0c90-0673-40c6-8227-a3d95e6b2ddd req-1e0c5745-84da-45e8-acc7-b25c574af9d0 service nova] Acquired lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.688868] env[61978]: DEBUG nova.network.neutron [req-797a0c90-0673-40c6-8227-a3d95e6b2ddd req-1e0c5745-84da-45e8-acc7-b25c574af9d0 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Refreshing network info cache for port 6c4911ae-3f5e-46c7-9538-7e1a9811252e {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1425.000103] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396111, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.004616] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80741500-86bd-4d51-ade4-77985ebd3642 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.022680] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e772ba-4581-4c9e-9175-9e18732ce30c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.163641] env[61978]: DEBUG oslo_concurrency.lockutils [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Releasing lock "refresh_cache-86f776d7-1ace-4e3c-8fa9-1562b97c832c" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1425.164104] env[61978]: DEBUG nova.compute.manager [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1425.164312] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1425.165219] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f3c28a-0d4f-4535-9c01-572abe0f3073 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.172918] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1425.173194] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-671e7895-262f-4c42-8dad-93879f3daa2a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.179287] env[61978]: DEBUG oslo_vmware.api [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Waiting for the task: (returnval){ [ 1425.179287] env[61978]: value = "task-1396112" [ 1425.179287] env[61978]: _type = "Task" [ 1425.179287] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.187464] env[61978]: DEBUG oslo_vmware.api [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Task: {'id': task-1396112, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.417196] env[61978]: DEBUG nova.network.neutron [req-797a0c90-0673-40c6-8227-a3d95e6b2ddd req-1e0c5745-84da-45e8-acc7-b25c574af9d0 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Updated VIF entry in instance network info cache for port 6c4911ae-3f5e-46c7-9538-7e1a9811252e. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1425.417593] env[61978]: DEBUG nova.network.neutron [req-797a0c90-0673-40c6-8227-a3d95e6b2ddd req-1e0c5745-84da-45e8-acc7-b25c574af9d0 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Updating instance_info_cache with network_info: [{"id": "6c4911ae-3f5e-46c7-9538-7e1a9811252e", "address": "fa:16:3e:c3:c6:fc", "network": {"id": "1200e62b-3b3a-40e1-98a2-c33a17573a38", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2039134840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8f40d19e7c74ade886c322a78583545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c4911ae-3f", "ovs_interfaceid": "6c4911ae-3f5e-46c7-9538-7e1a9811252e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.501607] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396111, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510229} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.502051] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] a6f73332-d0a5-4c52-8e38-8982e42ee62f/a6f73332-d0a5-4c52-8e38-8982e42ee62f.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1425.502239] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1425.502540] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3394fbed-9245-490d-ae7f-15e75722ddf1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.509455] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1425.509455] env[61978]: value = "task-1396113" [ 1425.509455] env[61978]: _type = "Task" [ 1425.509455] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.518103] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396113, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.533329] env[61978]: DEBUG nova.compute.manager [None req-a5eb7fc8-e9b1-4680-a644-4e96f357586c tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Instance disappeared during snapshot {{(pid=61978) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4494}} [ 1425.689363] env[61978]: DEBUG oslo_vmware.api [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Task: {'id': task-1396112, 'name': PowerOffVM_Task, 'duration_secs': 0.381981} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.690055] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1425.690262] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1425.690534] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-70bc2e6a-b121-4e1d-a03d-a9abdb048d36 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.713711] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1425.714007] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1425.714228] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Deleting the datastore file [datastore2] 86f776d7-1ace-4e3c-8fa9-1562b97c832c {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1425.714499] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be360c71-5413-4ed9-a87e-1699607991a5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.720927] env[61978]: DEBUG oslo_vmware.api [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Waiting for the task: (returnval){ [ 1425.720927] env[61978]: value = "task-1396115" [ 1425.720927] env[61978]: _type = "Task" [ 1425.720927] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.730212] env[61978]: DEBUG oslo_vmware.api [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Task: {'id': task-1396115, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.731103] env[61978]: DEBUG nova.compute.manager [None req-a5eb7fc8-e9b1-4680-a644-4e96f357586c tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Found 0 images (rotation: 2) {{(pid=61978) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 1425.920852] env[61978]: DEBUG oslo_concurrency.lockutils [req-797a0c90-0673-40c6-8227-a3d95e6b2ddd req-1e0c5745-84da-45e8-acc7-b25c574af9d0 service nova] Releasing lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1426.020383] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396113, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.101101} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.020611] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1426.021444] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c8a9b1-6470-4307-9900-bffc154a0d2d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.042812] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] a6f73332-d0a5-4c52-8e38-8982e42ee62f/a6f73332-d0a5-4c52-8e38-8982e42ee62f.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1426.043111] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7301e233-b489-409c-87fd-7695f22394ab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.062158] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1426.062158] env[61978]: value = "task-1396116" [ 1426.062158] env[61978]: _type = "Task" [ 1426.062158] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.070446] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396116, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.231052] env[61978]: DEBUG oslo_vmware.api [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Task: {'id': task-1396115, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.435188} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.231323] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1426.231549] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1426.231737] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1426.231920] env[61978]: INFO nova.compute.manager [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1426.232180] env[61978]: DEBUG oslo.service.loopingcall [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1426.232384] env[61978]: DEBUG nova.compute.manager [-] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1426.232480] env[61978]: DEBUG nova.network.neutron [-] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1426.248740] env[61978]: DEBUG nova.network.neutron [-] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1426.571946] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396116, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.751272] env[61978]: DEBUG nova.network.neutron [-] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.072875] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396116, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.253708] env[61978]: INFO nova.compute.manager [-] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Took 1.02 seconds to deallocate network for instance. [ 1427.573023] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396116, 'name': ReconfigVM_Task, 'duration_secs': 1.046654} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.573352] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Reconfigured VM instance instance-00000072 to attach disk [datastore2] a6f73332-d0a5-4c52-8e38-8982e42ee62f/a6f73332-d0a5-4c52-8e38-8982e42ee62f.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1427.573945] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1b4298d6-9b35-40ad-ab7c-9b7c6425a0d9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.580193] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1427.580193] env[61978]: value = "task-1396117" [ 1427.580193] env[61978]: _type = "Task" [ 1427.580193] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.587070] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396117, 'name': Rename_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.760618] env[61978]: DEBUG oslo_concurrency.lockutils [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1427.761971] env[61978]: DEBUG oslo_concurrency.lockutils [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1427.761971] env[61978]: DEBUG nova.objects.instance [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Lazy-loading 'resources' on Instance uuid 86f776d7-1ace-4e3c-8fa9-1562b97c832c {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1428.089304] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396117, 'name': Rename_Task, 'duration_secs': 0.128529} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.089564] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1428.089807] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e1484ce4-ffdf-4a0e-8ac7-b7e7dc726910 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.095203] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1428.095203] env[61978]: value = "task-1396118" [ 1428.095203] env[61978]: _type = "Task" [ 1428.095203] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.102149] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396118, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.422819] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f996a74-85e3-44fc-8783-8eeba3cf0e77 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.431844] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f9bdf90-cca7-4db0-af0d-d6f53f1c4f32 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.465321] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63617ed-036c-4d0e-87b6-21c8f0a33982 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.475074] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad96826-09e6-418a-bcaa-b6d8b95df89a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.494299] env[61978]: DEBUG nova.compute.provider_tree [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1428.605136] env[61978]: DEBUG oslo_vmware.api [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396118, 'name': PowerOnVM_Task, 'duration_secs': 0.405342} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.605487] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1428.605582] env[61978]: INFO nova.compute.manager [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Took 8.21 seconds to spawn the instance on the hypervisor. 
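The spawn sequence above follows one recurring pattern for every vSphere operation (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task): the driver invokes the operation, gets back a task reference, and polls it until it reports success or error, logging "progress is N%" in between. The sketch below is a minimal, self-contained illustration of that poll-until-terminal loop only; it is not the oslo.vmware or Nova implementation, and the get_task_info callable, the TaskInfo fields, and the poll interval are assumptions chosen for the example.

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    # Hypothetical stand-in for a vSphere task's 'info' property:
    # state is one of 'queued', 'running', 'success', 'error'.
    state: str
    progress: int = 0
    error: str = ""

def wait_for_task(get_task_info, task_ref, poll_interval=0.5, timeout=300.0):
    """Poll a task until it reaches a terminal state.

    get_task_info(task_ref) -> TaskInfo is a hypothetical helper standing in
    for the property lookup the real driver performs; it is not an
    oslo.vmware API.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_ref} failed: {info.error}")
        # 'queued' / 'running': report progress, then poll again --
        # this corresponds to the "progress is N%" lines in the log.
        print(f"Task {task_ref} progress is {info.progress}%")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_ref} did not complete within {timeout}s")

In the log, tasks such as task-1396111 (CopyVirtualDisk_Task, 0.51s) and task-1396116 (ReconfigVM_Task, 1.05s) complete through a loop of this shape before the driver moves on to the next step of the spawn.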
[ 1428.605751] env[61978]: DEBUG nova.compute.manager [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1428.606530] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b9464c-ce84-4fc1-a620-c16b20e31acd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.998292] env[61978]: DEBUG nova.scheduler.client.report [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1429.122170] env[61978]: INFO nova.compute.manager [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Took 12.90 seconds to build instance. [ 1429.506836] env[61978]: DEBUG oslo_concurrency.lockutils [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.745s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.531951] env[61978]: INFO nova.scheduler.client.report [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Deleted allocations for instance 86f776d7-1ace-4e3c-8fa9-1562b97c832c [ 1429.624231] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1adbdb5b-e99f-4dfe-a091-1e059342e61c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a6f73332-d0a5-4c52-8e38-8982e42ee62f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.414s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.664799] env[61978]: DEBUG nova.compute.manager [req-4e1c819c-f84d-42c9-881f-f889b0519f59 req-5a5f0f11-dabb-4caf-87b3-8bbbcbb91243 service nova] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Received event network-changed-204d4c58-f413-4204-b406-205812a3832d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1429.665080] env[61978]: DEBUG nova.compute.manager [req-4e1c819c-f84d-42c9-881f-f889b0519f59 req-5a5f0f11-dabb-4caf-87b3-8bbbcbb91243 service nova] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Refreshing instance network info cache due to event network-changed-204d4c58-f413-4204-b406-205812a3832d. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1429.665234] env[61978]: DEBUG oslo_concurrency.lockutils [req-4e1c819c-f84d-42c9-881f-f889b0519f59 req-5a5f0f11-dabb-4caf-87b3-8bbbcbb91243 service nova] Acquiring lock "refresh_cache-a6f73332-d0a5-4c52-8e38-8982e42ee62f" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1429.665384] env[61978]: DEBUG oslo_concurrency.lockutils [req-4e1c819c-f84d-42c9-881f-f889b0519f59 req-5a5f0f11-dabb-4caf-87b3-8bbbcbb91243 service nova] Acquired lock "refresh_cache-a6f73332-d0a5-4c52-8e38-8982e42ee62f" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1429.665548] env[61978]: DEBUG nova.network.neutron [req-4e1c819c-f84d-42c9-881f-f889b0519f59 req-5a5f0f11-dabb-4caf-87b3-8bbbcbb91243 service nova] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Refreshing network info cache for port 204d4c58-f413-4204-b406-205812a3832d {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1430.043090] env[61978]: DEBUG oslo_concurrency.lockutils [None req-97d4ef53-15dc-419e-aa1c-142a67d6ac82 tempest-ServersAaction247Test-1409111070 tempest-ServersAaction247Test-1409111070-project-member] Lock "86f776d7-1ace-4e3c-8fa9-1562b97c832c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.990s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1430.657687] env[61978]: DEBUG nova.network.neutron [req-4e1c819c-f84d-42c9-881f-f889b0519f59 req-5a5f0f11-dabb-4caf-87b3-8bbbcbb91243 service nova] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Updated VIF entry in instance network info cache for port 204d4c58-f413-4204-b406-205812a3832d. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1430.658131] env[61978]: DEBUG nova.network.neutron [req-4e1c819c-f84d-42c9-881f-f889b0519f59 req-5a5f0f11-dabb-4caf-87b3-8bbbcbb91243 service nova] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Updating instance_info_cache with network_info: [{"id": "204d4c58-f413-4204-b406-205812a3832d", "address": "fa:16:3e:e1:38:54", "network": {"id": "b65f6c14-2588-4d96-a954-d45b7ede9d35", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-701006356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b289cdad1fe4ad38c5d987680be2367", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap204d4c58-f4", "ovs_interfaceid": "204d4c58-f413-4204-b406-205812a3832d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1431.162441] env[61978]: DEBUG oslo_concurrency.lockutils [req-4e1c819c-f84d-42c9-881f-f889b0519f59 req-5a5f0f11-dabb-4caf-87b3-8bbbcbb91243 service nova] Releasing lock "refresh_cache-a6f73332-d0a5-4c52-8e38-8982e42ee62f" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1433.616142] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Acquiring lock "31c61275-c058-4c3e-8580-0958489d01a0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.616460] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Lock "31c61275-c058-4c3e-8580-0958489d01a0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.121151] env[61978]: DEBUG nova.compute.manager [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1434.643183] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1434.643520] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.644935] env[61978]: INFO nova.compute.claims [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1435.709580] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11dec0d6-f018-4848-a6e0-c9feab835adb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.716820] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b85c6a1a-2a91-4f70-b4c6-f84ef18a302f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.747400] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71d74b0-1afe-4cb7-83e0-365a6a5f1aed {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.754358] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21ae983-0110-439b-958b-b39b96987872 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.767289] env[61978]: DEBUG nova.compute.provider_tree [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1436.270540] env[61978]: DEBUG nova.scheduler.client.report [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1436.776329] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.133s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1436.776867] env[61978]: DEBUG nova.compute.manager [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1437.282988] env[61978]: DEBUG nova.compute.utils [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1437.284265] env[61978]: DEBUG nova.compute.manager [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1437.284411] env[61978]: DEBUG nova.network.neutron [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1437.334856] env[61978]: DEBUG nova.policy [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c44b077171a8492886bfb7488f13ad9c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd13397701eb14e91959deb41e01a47e4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1437.577712] env[61978]: DEBUG nova.network.neutron [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Successfully created port: 89d06079-7673-4b25-9663-c22c0bf4f123 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1437.788121] env[61978]: DEBUG nova.compute.manager [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1438.797737] env[61978]: DEBUG nova.compute.manager [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1438.836369] env[61978]: DEBUG nova.virt.hardware [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1438.836636] env[61978]: DEBUG nova.virt.hardware [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1438.836800] env[61978]: DEBUG nova.virt.hardware [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1438.836988] env[61978]: DEBUG nova.virt.hardware [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1438.837159] env[61978]: DEBUG nova.virt.hardware [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1438.837315] env[61978]: DEBUG nova.virt.hardware [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1438.837529] env[61978]: DEBUG nova.virt.hardware [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1438.837696] env[61978]: DEBUG nova.virt.hardware [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1438.837905] env[61978]: DEBUG nova.virt.hardware [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1438.838103] env[61978]: DEBUG nova.virt.hardware [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1438.838286] env[61978]: DEBUG nova.virt.hardware [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1438.839196] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf2c8db-3493-46b5-a3bf-81c64039ab5f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.846771] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68805d41-080a-4133-a9e2-e1197cf67271 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.992730] env[61978]: DEBUG nova.compute.manager [req-01921e4c-a59d-4f5a-8b40-0ed44cbbb061 req-53bac206-d17f-4c9b-b72f-fbd25d69f194 service nova] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Received event network-vif-plugged-89d06079-7673-4b25-9663-c22c0bf4f123 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1438.993012] env[61978]: DEBUG oslo_concurrency.lockutils [req-01921e4c-a59d-4f5a-8b40-0ed44cbbb061 req-53bac206-d17f-4c9b-b72f-fbd25d69f194 service nova] Acquiring lock "31c61275-c058-4c3e-8580-0958489d01a0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.993242] env[61978]: DEBUG oslo_concurrency.lockutils [req-01921e4c-a59d-4f5a-8b40-0ed44cbbb061 req-53bac206-d17f-4c9b-b72f-fbd25d69f194 service nova] Lock "31c61275-c058-4c3e-8580-0958489d01a0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1438.993418] env[61978]: DEBUG oslo_concurrency.lockutils [req-01921e4c-a59d-4f5a-8b40-0ed44cbbb061 req-53bac206-d17f-4c9b-b72f-fbd25d69f194 service nova] Lock "31c61275-c058-4c3e-8580-0958489d01a0-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1438.993594] env[61978]: DEBUG nova.compute.manager [req-01921e4c-a59d-4f5a-8b40-0ed44cbbb061 req-53bac206-d17f-4c9b-b72f-fbd25d69f194 service nova] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] No waiting events found dispatching network-vif-plugged-89d06079-7673-4b25-9663-c22c0bf4f123 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1438.993764] env[61978]: WARNING nova.compute.manager [req-01921e4c-a59d-4f5a-8b40-0ed44cbbb061 req-53bac206-d17f-4c9b-b72f-fbd25d69f194 service nova] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Received unexpected event network-vif-plugged-89d06079-7673-4b25-9663-c22c0bf4f123 for instance with vm_state building and task_state spawning. [ 1439.048302] env[61978]: DEBUG nova.network.neutron [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Successfully updated port: 89d06079-7673-4b25-9663-c22c0bf4f123 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1439.550851] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Acquiring lock "refresh_cache-31c61275-c058-4c3e-8580-0958489d01a0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1439.551066] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Acquired lock "refresh_cache-31c61275-c058-4c3e-8580-0958489d01a0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.551259] env[61978]: DEBUG nova.network.neutron [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1440.083389] env[61978]: DEBUG nova.network.neutron [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1440.199274] env[61978]: DEBUG nova.network.neutron [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Updating instance_info_cache with network_info: [{"id": "89d06079-7673-4b25-9663-c22c0bf4f123", "address": "fa:16:3e:76:bf:0a", "network": {"id": "8a539af5-5c06-4732-9479-8dedfeb724f6", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1521483587-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d13397701eb14e91959deb41e01a47e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89d06079-76", "ovs_interfaceid": "89d06079-7673-4b25-9663-c22c0bf4f123", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1440.702237] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Releasing lock "refresh_cache-31c61275-c058-4c3e-8580-0958489d01a0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1440.702585] env[61978]: DEBUG nova.compute.manager [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Instance network_info: |[{"id": "89d06079-7673-4b25-9663-c22c0bf4f123", "address": "fa:16:3e:76:bf:0a", "network": {"id": "8a539af5-5c06-4732-9479-8dedfeb724f6", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1521483587-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d13397701eb14e91959deb41e01a47e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89d06079-76", "ovs_interfaceid": "89d06079-7673-4b25-9663-c22c0bf4f123", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1440.703090] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:bf:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89d06079-7673-4b25-9663-c22c0bf4f123', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1440.710435] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Creating folder: Project (d13397701eb14e91959deb41e01a47e4). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1440.710713] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f43b3d7b-abe4-4533-9dcb-ef859d3a3dfc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.722049] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Created folder: Project (d13397701eb14e91959deb41e01a47e4) in parent group-v295764. [ 1440.722248] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Creating folder: Instances. Parent ref: group-v296070. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1440.722489] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-61770ee7-2948-45db-b2b0-97384b842506 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.731960] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Created folder: Instances in parent group-v296070. [ 1440.732197] env[61978]: DEBUG oslo.service.loopingcall [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1440.732382] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1440.732573] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4177b7a0-5797-4899-9e48-4a886a816822 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.750466] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1440.750466] env[61978]: value = "task-1396121" [ 1440.750466] env[61978]: _type = "Task" [ 1440.750466] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.761091] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396121, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.018575] env[61978]: DEBUG nova.compute.manager [req-afdd3220-cbdf-41f4-ab2e-5bb59f19e87e req-8a523fa0-d3a8-401b-a0be-49fc86501b7b service nova] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Received event network-changed-89d06079-7673-4b25-9663-c22c0bf4f123 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1441.018847] env[61978]: DEBUG nova.compute.manager [req-afdd3220-cbdf-41f4-ab2e-5bb59f19e87e req-8a523fa0-d3a8-401b-a0be-49fc86501b7b service nova] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Refreshing instance network info cache due to event network-changed-89d06079-7673-4b25-9663-c22c0bf4f123. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1441.019111] env[61978]: DEBUG oslo_concurrency.lockutils [req-afdd3220-cbdf-41f4-ab2e-5bb59f19e87e req-8a523fa0-d3a8-401b-a0be-49fc86501b7b service nova] Acquiring lock "refresh_cache-31c61275-c058-4c3e-8580-0958489d01a0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1441.019294] env[61978]: DEBUG oslo_concurrency.lockutils [req-afdd3220-cbdf-41f4-ab2e-5bb59f19e87e req-8a523fa0-d3a8-401b-a0be-49fc86501b7b service nova] Acquired lock "refresh_cache-31c61275-c058-4c3e-8580-0958489d01a0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1441.019512] env[61978]: DEBUG nova.network.neutron [req-afdd3220-cbdf-41f4-ab2e-5bb59f19e87e req-8a523fa0-d3a8-401b-a0be-49fc86501b7b service nova] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Refreshing network info cache for port 89d06079-7673-4b25-9663-c22c0bf4f123 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1441.259984] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396121, 'name': CreateVM_Task, 'duration_secs': 0.286602} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.260338] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1441.266915] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1441.267114] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1441.267438] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1441.267684] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59881878-56d4-4e2d-ade2-a9c5686bb03f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.272187] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Waiting for the task: (returnval){ [ 1441.272187] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]523e9fea-9382-519c-cd90-6f0dfbddb27a" [ 1441.272187] env[61978]: _type = "Task" [ 1441.272187] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.279934] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523e9fea-9382-519c-cd90-6f0dfbddb27a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.698517] env[61978]: DEBUG nova.network.neutron [req-afdd3220-cbdf-41f4-ab2e-5bb59f19e87e req-8a523fa0-d3a8-401b-a0be-49fc86501b7b service nova] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Updated VIF entry in instance network info cache for port 89d06079-7673-4b25-9663-c22c0bf4f123. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1441.698888] env[61978]: DEBUG nova.network.neutron [req-afdd3220-cbdf-41f4-ab2e-5bb59f19e87e req-8a523fa0-d3a8-401b-a0be-49fc86501b7b service nova] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Updating instance_info_cache with network_info: [{"id": "89d06079-7673-4b25-9663-c22c0bf4f123", "address": "fa:16:3e:76:bf:0a", "network": {"id": "8a539af5-5c06-4732-9479-8dedfeb724f6", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1521483587-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d13397701eb14e91959deb41e01a47e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89d06079-76", "ovs_interfaceid": "89d06079-7673-4b25-9663-c22c0bf4f123", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1441.782337] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]523e9fea-9382-519c-cd90-6f0dfbddb27a, 'name': SearchDatastore_Task, 'duration_secs': 0.011094} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.782596] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1441.782833] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1441.783088] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1441.783401] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1441.783437] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1441.783667] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cdbbde96-0216-4e3f-a9ed-b7823ce2def5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.791674] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1441.791879] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1441.792684] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bdc9dbe-93be-484a-96b3-61b7c15dd128 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.798337] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Waiting for the task: (returnval){ [ 1441.798337] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52943335-6c68-29dd-1865-ffe704139613" [ 1441.798337] env[61978]: _type = "Task" [ 1441.798337] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.808787] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52943335-6c68-29dd-1865-ffe704139613, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.201799] env[61978]: DEBUG oslo_concurrency.lockutils [req-afdd3220-cbdf-41f4-ab2e-5bb59f19e87e req-8a523fa0-d3a8-401b-a0be-49fc86501b7b service nova] Releasing lock "refresh_cache-31c61275-c058-4c3e-8580-0958489d01a0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1442.308875] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52943335-6c68-29dd-1865-ffe704139613, 'name': SearchDatastore_Task, 'duration_secs': 0.010063} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.309677] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e8aacb6-ef73-4efc-82b5-2a01486460f5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.315080] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Waiting for the task: (returnval){ [ 1442.315080] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52bfb90e-e965-db7d-23a4-d00871d45560" [ 1442.315080] env[61978]: _type = "Task" [ 1442.315080] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.322592] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52bfb90e-e965-db7d-23a4-d00871d45560, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.608327] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6571ee5a-1ae0-4540-8257-8b000685bbe7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "30d6cc11-0258-47aa-b083-7c103c91acf2" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1442.608327] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6571ee5a-1ae0-4540-8257-8b000685bbe7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "30d6cc11-0258-47aa-b083-7c103c91acf2" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1442.608443] env[61978]: DEBUG nova.compute.manager [None req-6571ee5a-1ae0-4540-8257-8b000685bbe7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1442.609312] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a590d3ce-8d61-4e1f-af3c-6fc672b98f19 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.615997] env[61978]: DEBUG nova.compute.manager [None req-6571ee5a-1ae0-4540-8257-8b000685bbe7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61978) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1442.616577] env[61978]: DEBUG nova.objects.instance [None req-6571ee5a-1ae0-4540-8257-8b000685bbe7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lazy-loading 'flavor' on Instance uuid 30d6cc11-0258-47aa-b083-7c103c91acf2 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1442.824473] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52bfb90e-e965-db7d-23a4-d00871d45560, 'name': SearchDatastore_Task, 'duration_secs': 0.009237} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.824742] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1442.824999] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 31c61275-c058-4c3e-8580-0958489d01a0/31c61275-c058-4c3e-8580-0958489d01a0.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1442.825265] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a42eb0e0-04e4-44f8-ad1c-c87d2c3378c7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.831163] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Waiting for the task: (returnval){ [ 1442.831163] env[61978]: value = "task-1396122" [ 1442.831163] env[61978]: _type = "Task" [ 1442.831163] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.838098] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Task: {'id': task-1396122, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.122461] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6571ee5a-1ae0-4540-8257-8b000685bbe7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1443.122761] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-70eaeb52-4854-4a30-b3c4-7380a56cab1b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.129575] env[61978]: DEBUG oslo_vmware.api [None req-6571ee5a-1ae0-4540-8257-8b000685bbe7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1443.129575] env[61978]: value = "task-1396123" [ 1443.129575] env[61978]: _type = "Task" [ 1443.129575] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.138224] env[61978]: DEBUG oslo_vmware.api [None req-6571ee5a-1ae0-4540-8257-8b000685bbe7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396123, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.341313] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Task: {'id': task-1396122, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503762} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.341603] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 31c61275-c058-4c3e-8580-0958489d01a0/31c61275-c058-4c3e-8580-0958489d01a0.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1443.341824] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1443.342107] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-104b91f2-9c7d-4c99-981b-e7ace2b98b21 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.348219] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Waiting for the task: (returnval){ [ 1443.348219] env[61978]: value = "task-1396124" [ 1443.348219] env[61978]: _type = "Task" [ 1443.348219] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.355289] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Task: {'id': task-1396124, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.638734] env[61978]: DEBUG oslo_vmware.api [None req-6571ee5a-1ae0-4540-8257-8b000685bbe7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396123, 'name': PowerOffVM_Task, 'duration_secs': 0.386991} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.639058] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6571ee5a-1ae0-4540-8257-8b000685bbe7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1443.639254] env[61978]: DEBUG nova.compute.manager [None req-6571ee5a-1ae0-4540-8257-8b000685bbe7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1443.640365] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9674b11-f238-48c7-b7ad-598d76d35ed9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.858022] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Task: {'id': task-1396124, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077781} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.858330] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1443.859135] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3667ca75-8820-4396-9027-f38e91856cf1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.880292] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] 31c61275-c058-4c3e-8580-0958489d01a0/31c61275-c058-4c3e-8580-0958489d01a0.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1443.880572] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc3a2a1d-89fc-4925-8a78-e3d2449ee2c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.900963] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Waiting for the task: (returnval){ [ 1443.900963] env[61978]: value = "task-1396125" [ 1443.900963] env[61978]: _type = "Task" [ 1443.900963] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.908574] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Task: {'id': task-1396125, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.152545] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6571ee5a-1ae0-4540-8257-8b000685bbe7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "30d6cc11-0258-47aa-b083-7c103c91acf2" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.544s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1444.410505] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Task: {'id': task-1396125, 'name': ReconfigVM_Task, 'duration_secs': 0.285272} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.410884] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Reconfigured VM instance instance-00000073 to attach disk [datastore2] 31c61275-c058-4c3e-8580-0958489d01a0/31c61275-c058-4c3e-8580-0958489d01a0.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1444.411480] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ee10011-6f8c-4ca8-a192-4faa83dc7a20 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.417416] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Waiting for the task: (returnval){ [ 1444.417416] env[61978]: value = "task-1396126" [ 1444.417416] env[61978]: _type = "Task" [ 1444.417416] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.424986] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Task: {'id': task-1396126, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.927788] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Task: {'id': task-1396126, 'name': Rename_Task, 'duration_secs': 0.160977} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.928118] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1444.928377] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-70ae70b6-1c7a-4a78-890f-2137b76340d2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.935086] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Waiting for the task: (returnval){ [ 1444.935086] env[61978]: value = "task-1396127" [ 1444.935086] env[61978]: _type = "Task" [ 1444.935086] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.943389] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Task: {'id': task-1396127, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.990524] env[61978]: DEBUG nova.objects.instance [None req-6f2efa7a-537e-49e8-9770-158ee1a10de6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lazy-loading 'flavor' on Instance uuid 30d6cc11-0258-47aa-b083-7c103c91acf2 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1445.444303] env[61978]: DEBUG oslo_vmware.api [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Task: {'id': task-1396127, 'name': PowerOnVM_Task, 'duration_secs': 0.42096} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.444652] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1445.444775] env[61978]: INFO nova.compute.manager [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Took 6.65 seconds to spawn the instance on the hypervisor. 
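The CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task entries above all follow the same wait_for_task/_poll_task rhythm: submit the vCenter task, poll it, log "progress is N%" until it reports "completed successfully" together with a duration_secs. A minimal sketch of that poll-until-done loop follows; the names get_task_info and TaskState are hypothetical stand-ins, not the real oslo.vmware API.

    # Illustrative poll-until-done loop in the spirit of the wait_for_task /
    # _poll_task entries above. get_task_info and TaskState are hypothetical.
    import time


    class TaskState:
        RUNNING = "running"
        SUCCESS = "success"
        ERROR = "error"


    def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300):
        """Poll a task until it leaves RUNNING, mirroring the
        'progress is N%' ... 'completed successfully' pattern in the log."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_id)   # e.g. {'state': ..., 'progress': ...}
            if info["state"] == TaskState.SUCCESS:
                return info                 # caller can read duration, result, etc.
            if info["state"] == TaskState.ERROR:
                raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
            time.sleep(interval)            # each pass corresponds to a 'progress is X%' line
        raise TimeoutError(f"task {task_id} did not complete within {timeout}s")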
[ 1445.444961] env[61978]: DEBUG nova.compute.manager [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1445.445711] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc1d2a22-c747-4555-b0ff-5e6457d52fa4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.495505] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6f2efa7a-537e-49e8-9770-158ee1a10de6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "refresh_cache-30d6cc11-0258-47aa-b083-7c103c91acf2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.495684] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6f2efa7a-537e-49e8-9770-158ee1a10de6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "refresh_cache-30d6cc11-0258-47aa-b083-7c103c91acf2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.496076] env[61978]: DEBUG nova.network.neutron [None req-6f2efa7a-537e-49e8-9770-158ee1a10de6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1445.496263] env[61978]: DEBUG nova.objects.instance [None req-6f2efa7a-537e-49e8-9770-158ee1a10de6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lazy-loading 'info_cache' on Instance uuid 30d6cc11-0258-47aa-b083-7c103c91acf2 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1445.963593] env[61978]: INFO nova.compute.manager [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Took 11.34 seconds to build instance. 
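The lockutils entries that bracket the build (for example the "compute_resources" claim near the start of this run, and the instance lock reported just below as held 12.849s) record two durations per lock: how long the caller waited to acquire it, and how long it was held. A rough, illustrative analogue of that timing is sketched here; it is not the oslo_concurrency implementation, only a way to reproduce the same waited/held bookkeeping.

    # Minimal sketch of the waited/held timing that the lockutils entries report.
    import threading
    import time
    from contextlib import contextmanager

    _locks = {}
    _registry_guard = threading.Lock()


    @contextmanager
    def timed_lock(name):
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
        try:
            yield
        finally:
            held = time.monotonic() - t0 - waited
            lock.release()
            print(f'Lock "{name}" "released" :: held {held:.3f}s')


    # usage, mirroring the resource claim seen in the log (body is hypothetical):
    # with timed_lock("compute_resources"):
    #     claim_resources_for_instance()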
[ 1445.999679] env[61978]: DEBUG nova.objects.base [None req-6f2efa7a-537e-49e8-9770-158ee1a10de6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Object Instance<30d6cc11-0258-47aa-b083-7c103c91acf2> lazy-loaded attributes: flavor,info_cache {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1446.465784] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d755f81d-a97c-4601-871a-e69b1264c126 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Lock "31c61275-c058-4c3e-8580-0958489d01a0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.849s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1446.717314] env[61978]: DEBUG nova.network.neutron [None req-6f2efa7a-537e-49e8-9770-158ee1a10de6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Updating instance_info_cache with network_info: [{"id": "e1688d68-33f0-48a2-8d22-475b9f9bacfb", "address": "fa:16:3e:dd:fe:12", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1688d68-33", "ovs_interfaceid": "e1688d68-33f0-48a2-8d22-475b9f9bacfb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1447.076370] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Acquiring lock "31c61275-c058-4c3e-8580-0958489d01a0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.076589] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Lock "31c61275-c058-4c3e-8580-0958489d01a0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.076813] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 
tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Acquiring lock "31c61275-c058-4c3e-8580-0958489d01a0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.077021] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Lock "31c61275-c058-4c3e-8580-0958489d01a0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.077212] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Lock "31c61275-c058-4c3e-8580-0958489d01a0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1447.079542] env[61978]: INFO nova.compute.manager [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Terminating instance [ 1447.081365] env[61978]: DEBUG nova.compute.manager [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1447.081565] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1447.082408] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-145b647c-13d1-42bc-aea6-da31703a6710 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.089689] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1447.089920] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-640fcaf5-85ad-489a-b332-3e6c9d96974c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.095832] env[61978]: DEBUG oslo_vmware.api [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Waiting for the task: (returnval){ [ 1447.095832] env[61978]: value = "task-1396128" [ 1447.095832] env[61978]: _type = "Task" [ 1447.095832] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.102988] env[61978]: DEBUG oslo_vmware.api [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Task: {'id': task-1396128, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.220053] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6f2efa7a-537e-49e8-9770-158ee1a10de6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "refresh_cache-30d6cc11-0258-47aa-b083-7c103c91acf2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1447.605234] env[61978]: DEBUG oslo_vmware.api [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Task: {'id': task-1396128, 'name': PowerOffVM_Task, 'duration_secs': 0.189528} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.605602] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1447.605662] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1447.605934] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aa793656-497a-4015-adc0-8d57ba14955f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.667203] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1447.667433] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1447.667625] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Deleting the datastore file [datastore2] 31c61275-c058-4c3e-8580-0958489d01a0 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1447.667932] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3c52789-e677-4643-9e53-4868674d67df {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.674171] env[61978]: DEBUG oslo_vmware.api [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Waiting for the task: (returnval){ [ 1447.674171] env[61978]: value = "task-1396130" [ 1447.674171] env[61978]: _type = "Task" [ 1447.674171] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.681655] env[61978]: DEBUG oslo_vmware.api [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Task: {'id': task-1396130, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.723768] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f2efa7a-537e-49e8-9770-158ee1a10de6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1447.724067] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22b1686f-a82b-47b2-8d88-d6708521cfea {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.730259] env[61978]: DEBUG oslo_vmware.api [None req-6f2efa7a-537e-49e8-9770-158ee1a10de6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1447.730259] env[61978]: value = "task-1396131" [ 1447.730259] env[61978]: _type = "Task" [ 1447.730259] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.737462] env[61978]: DEBUG oslo_vmware.api [None req-6f2efa7a-537e-49e8-9770-158ee1a10de6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396131, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.183651] env[61978]: DEBUG oslo_vmware.api [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Task: {'id': task-1396130, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150835} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.183921] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1448.184134] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1448.184323] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1448.184506] env[61978]: INFO nova.compute.manager [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Took 1.10 seconds to destroy the instance on the hypervisor. 
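The destroy sequence recorded above (PowerOffVM_Task, UnregisterVM, then DeleteDatastoreFile_Task, each polled via wait_for_task until "completed successfully") follows a plain invoke-then-poll pattern. The sketch below only illustrates that pattern; it is not the Nova or oslo.vmware implementation, and the helpers invoke_vm_method, delete_datastore_dir and get_task_state are hypothetical placeholders introduced for illustration.

# Hypothetical sketch of the power-off / unregister / delete-datastore-file
# flow visible in the log above. The helper callables are placeholders, not
# real oslo.vmware APIs.
import time

def wait_for_vcenter_task(get_task_state, task_ref, poll_interval=0.5):
    # Poll until the task leaves its running state, mirroring the
    # "progress is 0%" ... "completed successfully" records above.
    while True:
        state, error = get_task_state(task_ref)
        if state == "success":
            return
        if state == "error":
            raise RuntimeError(f"vCenter task {task_ref} failed: {error}")
        time.sleep(poll_interval)

def destroy_vm(invoke_vm_method, delete_datastore_dir, get_task_state, vm_ref, ds_path):
    # Same ordering as the log: power off, unregister, then delete the
    # instance directory from the datastore.
    wait_for_vcenter_task(get_task_state, invoke_vm_method(vm_ref, "PowerOffVM_Task"))
    invoke_vm_method(vm_ref, "UnregisterVM")  # the log shows no task wait for this call
    wait_for_vcenter_task(get_task_state, delete_datastore_dir(ds_path))

Once the datastore file deletion completes, the log continues with network deallocation for the instance, which is handled separately from the vCenter-side teardown.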
[ 1448.184747] env[61978]: DEBUG oslo.service.loopingcall [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1448.184973] env[61978]: DEBUG nova.compute.manager [-] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1448.185093] env[61978]: DEBUG nova.network.neutron [-] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1448.238894] env[61978]: DEBUG oslo_vmware.api [None req-6f2efa7a-537e-49e8-9770-158ee1a10de6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396131, 'name': PowerOnVM_Task, 'duration_secs': 0.451296} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.240890] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f2efa7a-537e-49e8-9770-158ee1a10de6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1448.241121] env[61978]: DEBUG nova.compute.manager [None req-6f2efa7a-537e-49e8-9770-158ee1a10de6 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1448.241854] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6346088a-503c-445a-aaf2-459a0098c878 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.437784] env[61978]: DEBUG nova.compute.manager [req-f4e69e2f-6c0e-42ca-9d35-5f5af4fa2681 req-2ec2d4fd-477d-425e-b232-07d3241e92bd service nova] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Received event network-vif-deleted-89d06079-7673-4b25-9663-c22c0bf4f123 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1448.438073] env[61978]: INFO nova.compute.manager [req-f4e69e2f-6c0e-42ca-9d35-5f5af4fa2681 req-2ec2d4fd-477d-425e-b232-07d3241e92bd service nova] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Neutron deleted interface 89d06079-7673-4b25-9663-c22c0bf4f123; detaching it from the instance and deleting it from the info cache [ 1448.438533] env[61978]: DEBUG nova.network.neutron [req-f4e69e2f-6c0e-42ca-9d35-5f5af4fa2681 req-2ec2d4fd-477d-425e-b232-07d3241e92bd service nova] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.918207] env[61978]: DEBUG nova.network.neutron [-] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.941189] 
env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4007af37-3d3e-4404-a6ca-a3204f8e555b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.950808] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-288c7a3b-408e-405f-8de9-2bfde03e40e2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.975826] env[61978]: DEBUG nova.compute.manager [req-f4e69e2f-6c0e-42ca-9d35-5f5af4fa2681 req-2ec2d4fd-477d-425e-b232-07d3241e92bd service nova] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Detach interface failed, port_id=89d06079-7673-4b25-9663-c22c0bf4f123, reason: Instance 31c61275-c058-4c3e-8580-0958489d01a0 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1449.420662] env[61978]: INFO nova.compute.manager [-] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Took 1.24 seconds to deallocate network for instance. [ 1449.927272] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.927619] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.927888] env[61978]: DEBUG nova.objects.instance [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Lazy-loading 'resources' on Instance uuid 31c61275-c058-4c3e-8580-0958489d01a0 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1449.966985] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80589847-1867-4b8a-936c-bf5d86712f24 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.974108] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-86f3f8b8-e772-4830-842b-f9f26ae052e7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Suspending the VM {{(pid=61978) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1449.974407] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-6c9cf955-56cf-4fc3-9400-93c0d196c986 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.980420] env[61978]: DEBUG oslo_vmware.api [None req-86f3f8b8-e772-4830-842b-f9f26ae052e7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1449.980420] env[61978]: value = "task-1396132" [ 1449.980420] env[61978]: 
_type = "Task" [ 1449.980420] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.988409] env[61978]: DEBUG oslo_vmware.api [None req-86f3f8b8-e772-4830-842b-f9f26ae052e7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396132, 'name': SuspendVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.497863] env[61978]: DEBUG oslo_vmware.api [None req-86f3f8b8-e772-4830-842b-f9f26ae052e7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396132, 'name': SuspendVM_Task} progress is 75%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.514727] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b78dd8-c8e3-4092-8b5c-fcb8e23fa9ab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.521767] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3404d332-cfa7-496b-a290-41ec93ebe244 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.553343] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df82949-28e9-4e48-ac2b-d12854f3b08f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.560978] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae962fa0-b228-4f1d-aff8-036893677686 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.574324] env[61978]: DEBUG nova.compute.provider_tree [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1450.990726] env[61978]: DEBUG oslo_vmware.api [None req-86f3f8b8-e772-4830-842b-f9f26ae052e7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396132, 'name': SuspendVM_Task, 'duration_secs': 0.590234} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.991014] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-86f3f8b8-e772-4830-842b-f9f26ae052e7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Suspended the VM {{(pid=61978) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1450.991206] env[61978]: DEBUG nova.compute.manager [None req-86f3f8b8-e772-4830-842b-f9f26ae052e7 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1450.991940] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6bdd92c-8fa5-4bca-a433-494e271732de {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.077661] env[61978]: DEBUG nova.scheduler.client.report [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1451.582376] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.655s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1451.600259] env[61978]: INFO nova.scheduler.client.report [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Deleted allocations for instance 31c61275-c058-4c3e-8580-0958489d01a0 [ 1452.108648] env[61978]: DEBUG oslo_concurrency.lockutils [None req-dbf8d7fe-2b99-479b-8501-387ef03d0af6 tempest-ServerMetadataNegativeTestJSON-1720666868 tempest-ServerMetadataNegativeTestJSON-1720666868-project-member] Lock "31c61275-c058-4c3e-8580-0958489d01a0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.031s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.293184] env[61978]: INFO nova.compute.manager [None req-f8ac9331-fb40-4119-8779-75e8b1ca120e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Resuming [ 1452.293820] env[61978]: DEBUG nova.objects.instance [None req-f8ac9331-fb40-4119-8779-75e8b1ca120e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lazy-loading 'flavor' on Instance uuid 
30d6cc11-0258-47aa-b083-7c103c91acf2 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1453.301834] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f8ac9331-fb40-4119-8779-75e8b1ca120e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "refresh_cache-30d6cc11-0258-47aa-b083-7c103c91acf2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1453.302122] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f8ac9331-fb40-4119-8779-75e8b1ca120e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquired lock "refresh_cache-30d6cc11-0258-47aa-b083-7c103c91acf2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1453.302244] env[61978]: DEBUG nova.network.neutron [None req-f8ac9331-fb40-4119-8779-75e8b1ca120e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1454.012408] env[61978]: DEBUG nova.network.neutron [None req-f8ac9331-fb40-4119-8779-75e8b1ca120e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Updating instance_info_cache with network_info: [{"id": "e1688d68-33f0-48a2-8d22-475b9f9bacfb", "address": "fa:16:3e:dd:fe:12", "network": {"id": "565bf181-ca02-4990-8167-b2de7b3b7356", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-704022963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86ad52b551104a2594f1dbbc287f9efa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1688d68-33", "ovs_interfaceid": "e1688d68-33f0-48a2-8d22-475b9f9bacfb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1454.516130] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f8ac9331-fb40-4119-8779-75e8b1ca120e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Releasing lock "refresh_cache-30d6cc11-0258-47aa-b083-7c103c91acf2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1454.516794] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce36e3d8-7569-4570-995d-7e203c94b4be {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.524286] env[61978]: 
DEBUG nova.virt.vmwareapi.vmops [None req-f8ac9331-fb40-4119-8779-75e8b1ca120e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Resuming the VM {{(pid=61978) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1454.524532] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-deba8d68-2b77-4a3b-8bf1-445d3eaaddb2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.530753] env[61978]: DEBUG oslo_vmware.api [None req-f8ac9331-fb40-4119-8779-75e8b1ca120e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1454.530753] env[61978]: value = "task-1396133" [ 1454.530753] env[61978]: _type = "Task" [ 1454.530753] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.538343] env[61978]: DEBUG oslo_vmware.api [None req-f8ac9331-fb40-4119-8779-75e8b1ca120e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396133, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.041630] env[61978]: DEBUG oslo_vmware.api [None req-f8ac9331-fb40-4119-8779-75e8b1ca120e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396133, 'name': PowerOnVM_Task, 'duration_secs': 0.435711} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.041630] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-f8ac9331-fb40-4119-8779-75e8b1ca120e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Resumed the VM {{(pid=61978) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1455.041878] env[61978]: DEBUG nova.compute.manager [None req-f8ac9331-fb40-4119-8779-75e8b1ca120e tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1455.042686] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d817680-3950-4335-bdcb-edb4425df83b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.907380] env[61978]: DEBUG oslo_concurrency.lockutils [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "30d6cc11-0258-47aa-b083-7c103c91acf2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1455.907747] env[61978]: DEBUG oslo_concurrency.lockutils [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "30d6cc11-0258-47aa-b083-7c103c91acf2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s 
{{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1455.907881] env[61978]: DEBUG oslo_concurrency.lockutils [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "30d6cc11-0258-47aa-b083-7c103c91acf2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1455.908132] env[61978]: DEBUG oslo_concurrency.lockutils [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "30d6cc11-0258-47aa-b083-7c103c91acf2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1455.908328] env[61978]: DEBUG oslo_concurrency.lockutils [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "30d6cc11-0258-47aa-b083-7c103c91acf2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1455.910547] env[61978]: INFO nova.compute.manager [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Terminating instance [ 1455.912314] env[61978]: DEBUG nova.compute.manager [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1455.912517] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1455.913360] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de73984-02f6-4084-b3e6-c3f5e66ab1f0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.921017] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1455.921250] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6bb2e4ec-00b7-408c-885b-91dbe058cef7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.928256] env[61978]: DEBUG oslo_vmware.api [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1455.928256] env[61978]: value = "task-1396134" [ 1455.928256] env[61978]: _type = "Task" [ 1455.928256] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.935510] env[61978]: DEBUG oslo_vmware.api [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396134, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.438615] env[61978]: DEBUG oslo_vmware.api [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396134, 'name': PowerOffVM_Task, 'duration_secs': 0.175583} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.438885] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1456.439074] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1456.439331] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c2c92b2b-14ae-4e98-8016-1d9fc141bdd6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.504226] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1456.504464] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1456.504664] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Deleting the datastore file [datastore2] 30d6cc11-0258-47aa-b083-7c103c91acf2 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1456.505030] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58f7eb19-42c4-45f5-b546-a7ceaa94f598 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.511766] env[61978]: DEBUG oslo_vmware.api [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for the task: (returnval){ [ 1456.511766] env[61978]: value = "task-1396136" [ 1456.511766] env[61978]: _type = "Task" [ 1456.511766] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.520328] env[61978]: DEBUG oslo_vmware.api [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396136, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.021298] env[61978]: DEBUG oslo_vmware.api [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Task: {'id': task-1396136, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143496} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.021607] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1457.021745] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1457.021927] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1457.022131] env[61978]: INFO nova.compute.manager [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1457.022559] env[61978]: DEBUG oslo.service.loopingcall [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1457.022799] env[61978]: DEBUG nova.compute.manager [-] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1457.022895] env[61978]: DEBUG nova.network.neutron [-] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1457.459610] env[61978]: DEBUG nova.compute.manager [req-07f058e6-6d6d-4f8d-a6b0-a54584f1887f req-6ba9d3ea-f46a-4bf1-b8a1-2daa74f64efc service nova] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Received event network-vif-deleted-e1688d68-33f0-48a2-8d22-475b9f9bacfb {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1457.459674] env[61978]: INFO nova.compute.manager [req-07f058e6-6d6d-4f8d-a6b0-a54584f1887f req-6ba9d3ea-f46a-4bf1-b8a1-2daa74f64efc service nova] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Neutron deleted interface e1688d68-33f0-48a2-8d22-475b9f9bacfb; detaching it from the instance and deleting it from the info cache [ 1457.459884] env[61978]: DEBUG nova.network.neutron [req-07f058e6-6d6d-4f8d-a6b0-a54584f1887f req-6ba9d3ea-f46a-4bf1-b8a1-2daa74f64efc service nova] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1457.937228] env[61978]: DEBUG nova.network.neutron [-] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1457.962615] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-35919aa3-5554-48c6-a016-0f69b32e381f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.974113] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e5e38c-24d4-4373-a842-a1f08ba7078c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.998923] env[61978]: DEBUG nova.compute.manager [req-07f058e6-6d6d-4f8d-a6b0-a54584f1887f req-6ba9d3ea-f46a-4bf1-b8a1-2daa74f64efc service nova] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Detach interface failed, port_id=e1688d68-33f0-48a2-8d22-475b9f9bacfb, reason: Instance 30d6cc11-0258-47aa-b083-7c103c91acf2 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1458.440103] env[61978]: INFO nova.compute.manager [-] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Took 1.42 seconds to deallocate network for instance. 
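With the network for 30d6cc11-0258-47aa-b083-7c103c91acf2 deallocated, the next records show the resource tracker acquiring the "compute_resources" lock, dropping the deleted instance's usage, and re-checking inventory against placement (reported below as "Inventory has not changed"). The sketch that follows only illustrates that lock-then-update pattern; it assumes oslo.concurrency is available, and the tracker object and its methods are invented stand-ins, not Nova's ResourceTracker API.

# Illustrative only: the lock-guarded usage update seen in the
# "compute_resources" lock records below. The tracker methods are
# hypothetical; only the lock / compare-before-report pattern matters.
from oslo_concurrency import lockutils

def update_usage_for_deleted_instance(tracker, instance_uuid):
    with lockutils.lock("compute_resources"):
        tracker.remove_usage(instance_uuid)        # drop the instance's VCPU/MEMORY_MB/DISK_GB usage
        inventory = tracker.build_inventory()      # e.g. {'VCPU': {...}, 'MEMORY_MB': {...}, 'DISK_GB': {...}}
        if inventory != tracker.last_reported:     # an unchanged inventory is not re-sent,
            tracker.report_to_placement(inventory) # matching "Inventory has not changed" in the log

Releasing the lock afterwards corresponds to the compute_resources "released by ResourceTracker.update_usage" record, after which the allocations for the deleted instance are removed from placement.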
[ 1458.946391] env[61978]: DEBUG oslo_concurrency.lockutils [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.946710] env[61978]: DEBUG oslo_concurrency.lockutils [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.946968] env[61978]: DEBUG nova.objects.instance [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lazy-loading 'resources' on Instance uuid 30d6cc11-0258-47aa-b083-7c103c91acf2 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1459.501899] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c8285a-c85c-42e0-9344-f660bc4999dc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.509556] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2b891a-f98e-4216-af3b-4e666b84b931 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.539874] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca514bb6-838d-4d4d-9864-08007ac4ee2f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.546790] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd35331-4c50-4720-84d7-df34d25687af {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.559558] env[61978]: DEBUG nova.compute.provider_tree [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1460.062541] env[61978]: DEBUG nova.scheduler.client.report [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1460.567759] env[61978]: DEBUG oslo_concurrency.lockutils [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 
tempest-ServerActionsTestJSON-798658854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.621s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.589008] env[61978]: INFO nova.scheduler.client.report [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Deleted allocations for instance 30d6cc11-0258-47aa-b083-7c103c91acf2 [ 1461.097470] env[61978]: DEBUG oslo_concurrency.lockutils [None req-347622c6-c835-465b-99f5-6aaf5eb5cec8 tempest-ServerActionsTestJSON-798658854 tempest-ServerActionsTestJSON-798658854-project-member] Lock "30d6cc11-0258-47aa-b083-7c103c91acf2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.190s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1461.738663] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1461.739033] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1462.244348] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1462.244548] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1463.023082] env[61978]: DEBUG oslo_concurrency.lockutils [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "6a454083-8d85-4a29-98dc-29eb0a072560" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.023376] env[61978]: DEBUG oslo_concurrency.lockutils [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "6a454083-8d85-4a29-98dc-29eb0a072560" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.023558] env[61978]: INFO nova.compute.manager [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Shelving [ 1463.250117] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. 
{{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1463.250117] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1463.250370] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1463.250609] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1463.250811] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1463.252089] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1463.252089] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1463.252089] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1463.252089] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1463.534308] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1463.534708] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f234c703-ccaf-4a3c-b4ac-94dafdfd3544 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.542629] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1463.542629] env[61978]: value = "task-1396137" [ 1463.542629] env[61978]: _type = "Task" [ 1463.542629] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.552087] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396137, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.754592] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.754832] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.755015] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.755183] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1463.756203] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50666bbf-6878-454a-b15f-ba5a42d7a351 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.764069] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f81fa9c2-9d97-4c74-bc6a-2208c833dc85 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.778492] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821ce770-310a-4e05-addd-0b5d461a194f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.784966] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aff6876-09c8-4b75-b22f-01a5e31102ec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.813262] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181307MB free_disk=185GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1463.813353] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.813528] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1464.053311] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396137, 'name': PowerOffVM_Task, 'duration_secs': 0.177553} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.053595] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1464.054391] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d53b7c-742e-4be2-adab-03d3b959dbef {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.074754] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4faa96dc-5303-4923-b178-1e56bbd6e006 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.585574] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Creating Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1464.585916] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d70342b5-1e56-4f1a-af24-8165df9bc290 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.594641] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1464.594641] env[61978]: value = "task-1396138" [ 1464.594641] env[61978]: _type = "Task" [ 1464.594641] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.605648] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396138, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.839610] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 6a454083-8d85-4a29-98dc-29eb0a072560 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1464.839787] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance a6f73332-d0a5-4c52-8e38-8982e42ee62f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1464.839996] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1464.840179] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1464.883808] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8de991fe-988f-4e42-8f73-50eaa95e74a0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.892452] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09fc3d7a-5c29-4ab8-a7ad-1084bffbd5e2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.923322] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99d79a5-5031-4ce8-a946-af73818e99f0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.931126] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d267fe5-46f9-4c66-9ab4-b8821ccf2af0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.944584] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1465.106087] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396138, 'name': CreateSnapshot_Task, 'duration_secs': 0.428954} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.106456] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Created Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1465.107184] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dacd880-f75d-4d65-b45a-85ad79245a57 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.448069] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1465.625935] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Creating linked-clone VM from snapshot {{(pid=61978) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1465.628136] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-646524df-7714-4328-a02a-69be0f75898c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.639514] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1465.639514] env[61978]: value = "task-1396139" [ 1465.639514] env[61978]: _type = "Task" [ 1465.639514] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.647407] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396139, 'name': CloneVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.953709] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1465.953876] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.140s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.123996] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "733c2f53-04d3-4a8b-a7c1-5194d7961a31" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.124324] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "733c2f53-04d3-4a8b-a7c1-5194d7961a31" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.151748] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396139, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.627019] env[61978]: DEBUG nova.compute.manager [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1466.651329] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396139, 'name': CloneVM_Task} progress is 95%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.151319] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396139, 'name': CloneVM_Task, 'duration_secs': 1.19703} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.152517] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.152821] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1467.154632] env[61978]: INFO nova.compute.claims [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1467.157622] env[61978]: INFO nova.virt.vmwareapi.vmops [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Created linked-clone VM from snapshot [ 1467.159190] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58332b63-10b8-400b-ab20-7ef8a5446cc8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.168164] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Uploading image 733843c8-cf02-4c91-9365-b1d9e9aaa3be {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1467.197028] env[61978]: DEBUG oslo_vmware.rw_handles [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1467.197028] env[61978]: value = "vm-296074" [ 1467.197028] env[61978]: _type = "VirtualMachine" [ 1467.197028] env[61978]: }. 
{{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1467.197310] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-7cde88f4-0899-4c2d-a3d7-5fec87e78b85 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.204499] env[61978]: DEBUG oslo_vmware.rw_handles [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lease: (returnval){ [ 1467.204499] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d70c9d-a57b-fe7b-f30d-7007a816c9c9" [ 1467.204499] env[61978]: _type = "HttpNfcLease" [ 1467.204499] env[61978]: } obtained for exporting VM: (result){ [ 1467.204499] env[61978]: value = "vm-296074" [ 1467.204499] env[61978]: _type = "VirtualMachine" [ 1467.204499] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1467.204756] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the lease: (returnval){ [ 1467.204756] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d70c9d-a57b-fe7b-f30d-7007a816c9c9" [ 1467.204756] env[61978]: _type = "HttpNfcLease" [ 1467.204756] env[61978]: } to be ready. {{(pid=61978) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1467.210783] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1467.210783] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d70c9d-a57b-fe7b-f30d-7007a816c9c9" [ 1467.210783] env[61978]: _type = "HttpNfcLease" [ 1467.210783] env[61978]: } is initializing. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1467.476561] env[61978]: DEBUG oslo_concurrency.lockutils [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "a6f73332-d0a5-4c52-8e38-8982e42ee62f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.476814] env[61978]: DEBUG oslo_concurrency.lockutils [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a6f73332-d0a5-4c52-8e38-8982e42ee62f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1467.713274] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1467.713274] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d70c9d-a57b-fe7b-f30d-7007a816c9c9" [ 1467.713274] env[61978]: _type = "HttpNfcLease" [ 1467.713274] env[61978]: } is ready. 
{{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1467.713724] env[61978]: DEBUG oslo_vmware.rw_handles [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1467.713724] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d70c9d-a57b-fe7b-f30d-7007a816c9c9" [ 1467.713724] env[61978]: _type = "HttpNfcLease" [ 1467.713724] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1467.714484] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb275c7e-0707-4677-85b6-8b5fbf2d9547 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.722393] env[61978]: DEBUG oslo_vmware.rw_handles [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524377ac-c3f0-fbb3-721f-738bec352930/disk-0.vmdk from lease info. {{(pid=61978) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1467.722663] env[61978]: DEBUG oslo_vmware.rw_handles [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524377ac-c3f0-fbb3-721f-738bec352930/disk-0.vmdk for reading. {{(pid=61978) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1467.808070] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-02f8ceac-eefe-4bb9-a2ac-ea91ae81b673 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.981701] env[61978]: DEBUG nova.compute.utils [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1468.220290] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a2da780-e909-421a-baf7-65e52be2a343 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.228134] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98796505-0bf3-4309-8210-9a55fa7009fc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.258419] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d94b96f-cf56-4b4e-89b4-c2f1336c389e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.266907] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6cdf5ef-abdb-47f2-a7f7-4e34a96bfa31 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.281960] 
env[61978]: DEBUG nova.compute.provider_tree [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1468.485133] env[61978]: DEBUG oslo_concurrency.lockutils [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a6f73332-d0a5-4c52-8e38-8982e42ee62f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.787833] env[61978]: DEBUG nova.scheduler.client.report [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1469.293217] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.140s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.293737] env[61978]: DEBUG nova.compute.manager [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1469.547184] env[61978]: DEBUG oslo_concurrency.lockutils [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "a6f73332-d0a5-4c52-8e38-8982e42ee62f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.547488] env[61978]: DEBUG oslo_concurrency.lockutils [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a6f73332-d0a5-4c52-8e38-8982e42ee62f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.547796] env[61978]: INFO nova.compute.manager [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Attaching volume a4d41e66-b964-4608-8f3d-36e9fc95a5ef to /dev/sdb [ 1469.584541] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04913503-ffdd-4003-9f77-d164decc0c2a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.591734] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d15e5495-0073-4198-9cc9-54ec6fa75802 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.604911] env[61978]: DEBUG nova.virt.block_device [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Updating existing volume attachment record: 7e78b995-9b58-4b62-ba08-892f5aad3281 {{(pid=61978) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1469.798349] env[61978]: DEBUG nova.compute.utils [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1469.799865] env[61978]: DEBUG nova.compute.manager [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1469.800513] env[61978]: DEBUG nova.network.neutron [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1469.849264] env[61978]: DEBUG nova.policy [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df950ca91cd64479950545608f749fb6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aaa4a0cb1a4c45949b43032fd9395200', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1470.106325] env[61978]: DEBUG nova.network.neutron [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Successfully created port: 850a6613-240f-4bb6-a3bd-cd95cd2ebe18 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1470.306467] env[61978]: DEBUG nova.compute.manager [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1471.314779] env[61978]: DEBUG nova.compute.manager [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1471.353953] env[61978]: DEBUG nova.virt.hardware [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1471.354245] env[61978]: DEBUG nova.virt.hardware [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1471.354464] env[61978]: DEBUG nova.virt.hardware [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1471.354720] env[61978]: DEBUG nova.virt.hardware [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1471.354884] env[61978]: DEBUG nova.virt.hardware [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1471.355052] env[61978]: DEBUG nova.virt.hardware [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1471.355278] env[61978]: DEBUG nova.virt.hardware [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1471.355445] env[61978]: DEBUG nova.virt.hardware [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1471.355621] env[61978]: DEBUG nova.virt.hardware [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 
tempest-ServersTestJSON-1559277588-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1471.355788] env[61978]: DEBUG nova.virt.hardware [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1471.355965] env[61978]: DEBUG nova.virt.hardware [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1471.356843] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89661565-24a9-4537-a9bd-62b5d3d1c430 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.365048] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d56229-977e-4452-8d5e-dca00c10c0a0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.512871] env[61978]: DEBUG nova.compute.manager [req-99b4d61c-7eb4-42f6-871d-77feb76d2bea req-2502d1d8-9687-4511-addb-09f94065a16c service nova] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Received event network-vif-plugged-850a6613-240f-4bb6-a3bd-cd95cd2ebe18 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1471.513142] env[61978]: DEBUG oslo_concurrency.lockutils [req-99b4d61c-7eb4-42f6-871d-77feb76d2bea req-2502d1d8-9687-4511-addb-09f94065a16c service nova] Acquiring lock "733c2f53-04d3-4a8b-a7c1-5194d7961a31-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.513353] env[61978]: DEBUG oslo_concurrency.lockutils [req-99b4d61c-7eb4-42f6-871d-77feb76d2bea req-2502d1d8-9687-4511-addb-09f94065a16c service nova] Lock "733c2f53-04d3-4a8b-a7c1-5194d7961a31-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.513492] env[61978]: DEBUG oslo_concurrency.lockutils [req-99b4d61c-7eb4-42f6-871d-77feb76d2bea req-2502d1d8-9687-4511-addb-09f94065a16c service nova] Lock "733c2f53-04d3-4a8b-a7c1-5194d7961a31-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.513665] env[61978]: DEBUG nova.compute.manager [req-99b4d61c-7eb4-42f6-871d-77feb76d2bea req-2502d1d8-9687-4511-addb-09f94065a16c service nova] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] No waiting events found dispatching network-vif-plugged-850a6613-240f-4bb6-a3bd-cd95cd2ebe18 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1471.513833] env[61978]: WARNING nova.compute.manager [req-99b4d61c-7eb4-42f6-871d-77feb76d2bea req-2502d1d8-9687-4511-addb-09f94065a16c service nova] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] 
Received unexpected event network-vif-plugged-850a6613-240f-4bb6-a3bd-cd95cd2ebe18 for instance with vm_state building and task_state spawning. [ 1471.600488] env[61978]: DEBUG nova.network.neutron [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Successfully updated port: 850a6613-240f-4bb6-a3bd-cd95cd2ebe18 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1472.102958] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "refresh_cache-733c2f53-04d3-4a8b-a7c1-5194d7961a31" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1472.103177] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired lock "refresh_cache-733c2f53-04d3-4a8b-a7c1-5194d7961a31" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1472.103312] env[61978]: DEBUG nova.network.neutron [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1472.635338] env[61978]: DEBUG nova.network.neutron [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1472.768901] env[61978]: DEBUG nova.network.neutron [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Updating instance_info_cache with network_info: [{"id": "850a6613-240f-4bb6-a3bd-cd95cd2ebe18", "address": "fa:16:3e:c6:16:a6", "network": {"id": "a08d803f-f140-462d-a335-21fe5ce33ff5", "bridge": "br-int", "label": "tempest-ServersTestJSON-1636313508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaa4a0cb1a4c45949b43032fd9395200", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap850a6613-24", "ovs_interfaceid": "850a6613-240f-4bb6-a3bd-cd95cd2ebe18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1473.272317] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Releasing lock "refresh_cache-733c2f53-04d3-4a8b-a7c1-5194d7961a31" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1473.272640] env[61978]: DEBUG nova.compute.manager [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Instance network_info: |[{"id": "850a6613-240f-4bb6-a3bd-cd95cd2ebe18", "address": "fa:16:3e:c6:16:a6", "network": {"id": "a08d803f-f140-462d-a335-21fe5ce33ff5", "bridge": "br-int", "label": "tempest-ServersTestJSON-1636313508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaa4a0cb1a4c45949b43032fd9395200", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap850a6613-24", "ovs_interfaceid": "850a6613-240f-4bb6-a3bd-cd95cd2ebe18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1473.273246] 
env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:16:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb68953b-dee5-4d9d-b47b-277336ba76dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '850a6613-240f-4bb6-a3bd-cd95cd2ebe18', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1473.280983] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Creating folder: Project (aaa4a0cb1a4c45949b43032fd9395200). Parent ref: group-v295764. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1473.281324] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d9a63adb-9d2a-4e60-98ce-e5420c949842 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.292261] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Created folder: Project (aaa4a0cb1a4c45949b43032fd9395200) in parent group-v295764. [ 1473.292479] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Creating folder: Instances. Parent ref: group-v296077. {{(pid=61978) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1473.292739] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9831dfd3-7a1f-4ddd-bb43-c5e9608e0d39 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.306526] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Created folder: Instances in parent group-v296077. [ 1473.306781] env[61978]: DEBUG oslo.service.loopingcall [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1473.306982] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1473.307243] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27a51b6c-1111-4c9b-a717-b497d6eebf17 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.327705] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1473.327705] env[61978]: value = "task-1396147" [ 1473.327705] env[61978]: _type = "Task" [ 1473.327705] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.335839] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396147, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.540331] env[61978]: DEBUG nova.compute.manager [req-dde0dbb4-36da-41f2-a938-00e407289ff4 req-024f271b-c33e-4be1-9704-78d6c1ddf188 service nova] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Received event network-changed-850a6613-240f-4bb6-a3bd-cd95cd2ebe18 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1473.540562] env[61978]: DEBUG nova.compute.manager [req-dde0dbb4-36da-41f2-a938-00e407289ff4 req-024f271b-c33e-4be1-9704-78d6c1ddf188 service nova] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Refreshing instance network info cache due to event network-changed-850a6613-240f-4bb6-a3bd-cd95cd2ebe18. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1473.540754] env[61978]: DEBUG oslo_concurrency.lockutils [req-dde0dbb4-36da-41f2-a938-00e407289ff4 req-024f271b-c33e-4be1-9704-78d6c1ddf188 service nova] Acquiring lock "refresh_cache-733c2f53-04d3-4a8b-a7c1-5194d7961a31" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1473.540904] env[61978]: DEBUG oslo_concurrency.lockutils [req-dde0dbb4-36da-41f2-a938-00e407289ff4 req-024f271b-c33e-4be1-9704-78d6c1ddf188 service nova] Acquired lock "refresh_cache-733c2f53-04d3-4a8b-a7c1-5194d7961a31" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1473.541083] env[61978]: DEBUG nova.network.neutron [req-dde0dbb4-36da-41f2-a938-00e407289ff4 req-024f271b-c33e-4be1-9704-78d6c1ddf188 service nova] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Refreshing network info cache for port 850a6613-240f-4bb6-a3bd-cd95cd2ebe18 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1473.837979] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396147, 'name': CreateVM_Task, 'duration_secs': 0.425194} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.838389] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1473.838907] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1473.839148] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1473.839495] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1473.839757] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3c9d837-bd79-4931-84f4-075461b461f7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.844445] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1473.844445] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52dd4c00-b175-2df2-67d5-d4e41764e65d" [ 1473.844445] env[61978]: _type = "Task" [ 1473.844445] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.852243] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52dd4c00-b175-2df2-67d5-d4e41764e65d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.241300] env[61978]: DEBUG nova.network.neutron [req-dde0dbb4-36da-41f2-a938-00e407289ff4 req-024f271b-c33e-4be1-9704-78d6c1ddf188 service nova] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Updated VIF entry in instance network info cache for port 850a6613-240f-4bb6-a3bd-cd95cd2ebe18. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1474.242039] env[61978]: DEBUG nova.network.neutron [req-dde0dbb4-36da-41f2-a938-00e407289ff4 req-024f271b-c33e-4be1-9704-78d6c1ddf188 service nova] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Updating instance_info_cache with network_info: [{"id": "850a6613-240f-4bb6-a3bd-cd95cd2ebe18", "address": "fa:16:3e:c6:16:a6", "network": {"id": "a08d803f-f140-462d-a335-21fe5ce33ff5", "bridge": "br-int", "label": "tempest-ServersTestJSON-1636313508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaa4a0cb1a4c45949b43032fd9395200", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap850a6613-24", "ovs_interfaceid": "850a6613-240f-4bb6-a3bd-cd95cd2ebe18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1474.354787] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52dd4c00-b175-2df2-67d5-d4e41764e65d, 'name': SearchDatastore_Task, 'duration_secs': 0.029005} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.355083] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1474.355334] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1474.355573] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1474.355726] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.355913] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1474.356200] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7cecddce-f2a5-4584-9627-de073c7fa069 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.364134] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1474.364333] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1474.364994] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb2d97c4-d151-4a8f-9609-d9e8cfe12651 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.370267] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1474.370267] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ace77e-36e8-8a50-be1b-deed4a1901b6" [ 1474.370267] env[61978]: _type = "Task" [ 1474.370267] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.377452] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ace77e-36e8-8a50-be1b-deed4a1901b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.651324] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Volume attach. Driver type: vmdk {{(pid=61978) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1474.651572] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296076', 'volume_id': 'a4d41e66-b964-4608-8f3d-36e9fc95a5ef', 'name': 'volume-a4d41e66-b964-4608-8f3d-36e9fc95a5ef', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a6f73332-d0a5-4c52-8e38-8982e42ee62f', 'attached_at': '', 'detached_at': '', 'volume_id': 'a4d41e66-b964-4608-8f3d-36e9fc95a5ef', 'serial': 'a4d41e66-b964-4608-8f3d-36e9fc95a5ef'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1474.652477] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3ebc7c7-a706-4a11-bdc9-0c5977473d43 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.668466] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013f7dbf-b876-457e-a46b-9e156d951157 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.693376] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] volume-a4d41e66-b964-4608-8f3d-36e9fc95a5ef/volume-a4d41e66-b964-4608-8f3d-36e9fc95a5ef.vmdk or device None with type 
thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1474.693702] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50e5dcc4-d526-4c18-b403-1955e8321b2c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.712255] env[61978]: DEBUG oslo_vmware.api [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1474.712255] env[61978]: value = "task-1396148" [ 1474.712255] env[61978]: _type = "Task" [ 1474.712255] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.720411] env[61978]: DEBUG oslo_vmware.api [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396148, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.745288] env[61978]: DEBUG oslo_concurrency.lockutils [req-dde0dbb4-36da-41f2-a938-00e407289ff4 req-024f271b-c33e-4be1-9704-78d6c1ddf188 service nova] Releasing lock "refresh_cache-733c2f53-04d3-4a8b-a7c1-5194d7961a31" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1474.881312] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ace77e-36e8-8a50-be1b-deed4a1901b6, 'name': SearchDatastore_Task, 'duration_secs': 0.013508} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.882113] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdcf8d23-732d-4ae4-af93-60d3d0f65af2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.887365] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1474.887365] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52489b37-56e1-3b97-e25d-9ed7ca46fbce" [ 1474.887365] env[61978]: _type = "Task" [ 1474.887365] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.894665] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52489b37-56e1-3b97-e25d-9ed7ca46fbce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.222567] env[61978]: DEBUG oslo_vmware.api [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396148, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.398050] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52489b37-56e1-3b97-e25d-9ed7ca46fbce, 'name': SearchDatastore_Task, 'duration_secs': 0.012835} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.398385] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1475.398685] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 733c2f53-04d3-4a8b-a7c1-5194d7961a31/733c2f53-04d3-4a8b-a7c1-5194d7961a31.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1475.398985] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-12ba5be1-f910-429c-8136-5c1be882379b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.405156] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1475.405156] env[61978]: value = "task-1396149" [ 1475.405156] env[61978]: _type = "Task" [ 1475.405156] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.412682] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396149, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.557142] env[61978]: DEBUG oslo_vmware.rw_handles [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524377ac-c3f0-fbb3-721f-738bec352930/disk-0.vmdk. 
{{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1475.558194] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b5739b-d497-4018-bc06-8c34b52ee84d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.564376] env[61978]: DEBUG oslo_vmware.rw_handles [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524377ac-c3f0-fbb3-721f-738bec352930/disk-0.vmdk is in state: ready. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1475.564569] env[61978]: ERROR oslo_vmware.rw_handles [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524377ac-c3f0-fbb3-721f-738bec352930/disk-0.vmdk due to incomplete transfer. [ 1475.564788] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a033a6b3-09e6-454e-97a4-140ae3d57b16 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.570626] env[61978]: DEBUG oslo_vmware.rw_handles [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524377ac-c3f0-fbb3-721f-738bec352930/disk-0.vmdk. {{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1475.570846] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Uploaded image 733843c8-cf02-4c91-9365-b1d9e9aaa3be to the Glance image server {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1475.573169] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Destroying the VM {{(pid=61978) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1475.573403] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b946a7a5-c01e-4fb6-a80e-bc85860326a9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.578611] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1475.578611] env[61978]: value = "task-1396150" [ 1475.578611] env[61978]: _type = "Task" [ 1475.578611] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.585746] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396150, 'name': Destroy_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.722808] env[61978]: DEBUG oslo_vmware.api [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396148, 'name': ReconfigVM_Task, 'duration_secs': 0.566596} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.723115] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Reconfigured VM instance instance-00000072 to attach disk [datastore2] volume-a4d41e66-b964-4608-8f3d-36e9fc95a5ef/volume-a4d41e66-b964-4608-8f3d-36e9fc95a5ef.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1475.727728] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-655a5759-bc16-4d4a-a54c-2260bb32d96b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.743234] env[61978]: DEBUG oslo_vmware.api [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1475.743234] env[61978]: value = "task-1396151" [ 1475.743234] env[61978]: _type = "Task" [ 1475.743234] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.752864] env[61978]: DEBUG oslo_vmware.api [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396151, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.914655] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396149, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507008} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.915040] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 733c2f53-04d3-4a8b-a7c1-5194d7961a31/733c2f53-04d3-4a8b-a7c1-5194d7961a31.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1475.915157] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1475.915432] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8f399c68-4e9f-4949-ad23-7872cf2561be {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.922063] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1475.922063] env[61978]: value = "task-1396152" [ 1475.922063] env[61978]: _type = "Task" [ 1475.922063] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.928831] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396152, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.088181] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396150, 'name': Destroy_Task} progress is 33%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.252393] env[61978]: DEBUG oslo_vmware.api [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396151, 'name': ReconfigVM_Task, 'duration_secs': 0.240156} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.252553] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296076', 'volume_id': 'a4d41e66-b964-4608-8f3d-36e9fc95a5ef', 'name': 'volume-a4d41e66-b964-4608-8f3d-36e9fc95a5ef', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a6f73332-d0a5-4c52-8e38-8982e42ee62f', 'attached_at': '', 'detached_at': '', 'volume_id': 'a4d41e66-b964-4608-8f3d-36e9fc95a5ef', 'serial': 'a4d41e66-b964-4608-8f3d-36e9fc95a5ef'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1476.432015] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396152, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061327} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.432015] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1476.432347] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c240f8-e932-403b-bf8a-ee11d8879f1b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.454014] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Reconfiguring VM instance instance-00000074 to attach disk [datastore2] 733c2f53-04d3-4a8b-a7c1-5194d7961a31/733c2f53-04d3-4a8b-a7c1-5194d7961a31.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1476.454267] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d457393-87a7-4d5d-a848-bd5ab67533de {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.473859] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1476.473859] env[61978]: value = "task-1396153" [ 1476.473859] env[61978]: _type = "Task" [ 1476.473859] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.482579] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396153, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.588937] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396150, 'name': Destroy_Task, 'duration_secs': 0.584284} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.589152] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Destroyed the VM [ 1476.589367] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Deleting Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1476.589617] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5bcd2492-9fe7-4689-9c8c-f4a04207d0a5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.595500] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1476.595500] env[61978]: value = "task-1396154" [ 1476.595500] env[61978]: _type = "Task" [ 1476.595500] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.602794] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396154, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.984965] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396153, 'name': ReconfigVM_Task, 'duration_secs': 0.300431} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.985340] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Reconfigured VM instance instance-00000074 to attach disk [datastore2] 733c2f53-04d3-4a8b-a7c1-5194d7961a31/733c2f53-04d3-4a8b-a7c1-5194d7961a31.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1476.985951] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d2166a7-e9bd-4fa0-8b4f-fd4940dd7d7e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.992080] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1476.992080] env[61978]: value = "task-1396155" [ 1476.992080] env[61978]: _type = "Task" [ 1476.992080] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.999406] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396155, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.104944] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396154, 'name': RemoveSnapshot_Task, 'duration_secs': 0.382423} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.105252] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Deleted Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1477.105540] env[61978]: DEBUG nova.compute.manager [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1477.106307] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19242db9-a494-4ff5-9c7a-5bb84a4846f9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.288093] env[61978]: DEBUG nova.objects.instance [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lazy-loading 'flavor' on Instance uuid a6f73332-d0a5-4c52-8e38-8982e42ee62f {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1477.502431] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396155, 'name': Rename_Task, 'duration_secs': 0.155133} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.502716] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1477.502964] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0818d7f4-8e5c-4b3b-a236-e19d824bd8bd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.509859] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1477.509859] env[61978]: value = "task-1396156" [ 1477.509859] env[61978]: _type = "Task" [ 1477.509859] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.517036] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396156, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.619023] env[61978]: INFO nova.compute.manager [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Shelve offloading [ 1477.620685] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1477.620981] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad119ca1-7baa-49cb-97f4-dbba77495633 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.628566] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1477.628566] env[61978]: value = "task-1396157" [ 1477.628566] env[61978]: _type = "Task" [ 1477.628566] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.636852] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396157, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.796476] env[61978]: DEBUG oslo_concurrency.lockutils [None req-99d83643-debf-481f-b80f-85bd018af8a7 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a6f73332-d0a5-4c52-8e38-8982e42ee62f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.248s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.020569] env[61978]: DEBUG oslo_vmware.api [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396156, 'name': PowerOnVM_Task, 'duration_secs': 0.418658} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.023115] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1478.023115] env[61978]: INFO nova.compute.manager [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Took 6.71 seconds to spawn the instance on the hypervisor. 
[ 1478.023115] env[61978]: DEBUG nova.compute.manager [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1478.023115] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba1b36f-4ec3-4bfe-83ad-535155361507 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.140738] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] VM already powered off {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1478.140855] env[61978]: DEBUG nova.compute.manager [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1478.141650] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c383ebb-ed5a-4e11-b898-a7d6e4672874 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.147921] env[61978]: DEBUG oslo_concurrency.lockutils [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1478.148156] env[61978]: DEBUG oslo_concurrency.lockutils [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquired lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.148351] env[61978]: DEBUG nova.network.neutron [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1478.541675] env[61978]: INFO nova.compute.manager [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Took 11.41 seconds to build instance. 
[ 1478.857278] env[61978]: DEBUG nova.network.neutron [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Updating instance_info_cache with network_info: [{"id": "6c4911ae-3f5e-46c7-9538-7e1a9811252e", "address": "fa:16:3e:c3:c6:fc", "network": {"id": "1200e62b-3b3a-40e1-98a2-c33a17573a38", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2039134840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8f40d19e7c74ade886c322a78583545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c4911ae-3f", "ovs_interfaceid": "6c4911ae-3f5e-46c7-9538-7e1a9811252e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.987341] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "77a8cde0-b046-4970-9979-9d4b85a224e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.987589] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "77a8cde0-b046-4970-9979-9d4b85a224e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1479.044069] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c62ba154-c959-43f9-9331-c0ca07644925 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "733c2f53-04d3-4a8b-a7c1-5194d7961a31" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.920s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.359963] env[61978]: DEBUG oslo_concurrency.lockutils [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Releasing lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1479.490691] env[61978]: DEBUG nova.compute.manager [None 
req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1479.618881] env[61978]: DEBUG nova.compute.manager [req-adef8e05-7b7c-46c5-ba04-7fb45d87e882 req-3b67532d-6c2a-4082-9f43-d09bc9c78f46 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Received event network-vif-unplugged-6c4911ae-3f5e-46c7-9538-7e1a9811252e {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1479.619535] env[61978]: DEBUG oslo_concurrency.lockutils [req-adef8e05-7b7c-46c5-ba04-7fb45d87e882 req-3b67532d-6c2a-4082-9f43-d09bc9c78f46 service nova] Acquiring lock "6a454083-8d85-4a29-98dc-29eb0a072560-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1479.619535] env[61978]: DEBUG oslo_concurrency.lockutils [req-adef8e05-7b7c-46c5-ba04-7fb45d87e882 req-3b67532d-6c2a-4082-9f43-d09bc9c78f46 service nova] Lock "6a454083-8d85-4a29-98dc-29eb0a072560-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1479.619652] env[61978]: DEBUG oslo_concurrency.lockutils [req-adef8e05-7b7c-46c5-ba04-7fb45d87e882 req-3b67532d-6c2a-4082-9f43-d09bc9c78f46 service nova] Lock "6a454083-8d85-4a29-98dc-29eb0a072560-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.619832] env[61978]: DEBUG nova.compute.manager [req-adef8e05-7b7c-46c5-ba04-7fb45d87e882 req-3b67532d-6c2a-4082-9f43-d09bc9c78f46 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] No waiting events found dispatching network-vif-unplugged-6c4911ae-3f5e-46c7-9538-7e1a9811252e {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1479.620013] env[61978]: WARNING nova.compute.manager [req-adef8e05-7b7c-46c5-ba04-7fb45d87e882 req-3b67532d-6c2a-4082-9f43-d09bc9c78f46 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Received unexpected event network-vif-unplugged-6c4911ae-3f5e-46c7-9538-7e1a9811252e for instance with vm_state shelved and task_state shelving_offloading. 
[ 1479.711364] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1479.712277] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a183b915-a451-4c7c-99d4-ebebe250fb3a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.720501] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1479.720793] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d8a0397-343c-4ae5-b4e5-71ad3ab814ca {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.782724] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1479.782960] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1479.783176] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Deleting the datastore file [datastore2] 6a454083-8d85-4a29-98dc-29eb0a072560 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1479.783516] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5840804b-8b18-444d-8960-5fbb80307f6b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.790199] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1479.790199] env[61978]: value = "task-1396159" [ 1479.790199] env[61978]: _type = "Task" [ 1479.790199] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.797925] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396159, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.942800] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "116a3384-8bf3-49c6-9ee0-01d2781c69d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1479.943052] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "116a3384-8bf3-49c6-9ee0-01d2781c69d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.011610] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.011865] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.013329] env[61978]: INFO nova.compute.claims [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1480.299992] env[61978]: DEBUG oslo_vmware.api [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396159, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133794} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.300361] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1480.300420] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1480.300603] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1480.321575] env[61978]: INFO nova.scheduler.client.report [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Deleted allocations for instance 6a454083-8d85-4a29-98dc-29eb0a072560 [ 1480.445656] env[61978]: DEBUG nova.compute.manager [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1480.827061] env[61978]: DEBUG oslo_concurrency.lockutils [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.964726] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1481.080463] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed88e1a-f6e4-41c4-8f9a-5a5afe8ed012 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.088191] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76456fc-1f5c-4477-b8fd-5de1b75c5708 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.118515] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eadf2425-1fb0-44ad-af55-bd4537d33124 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.125798] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4749e1fa-a67b-41a8-a348-67e6a8e2d0d0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.138955] env[61978]: DEBUG nova.compute.provider_tree [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1481.642220] env[61978]: DEBUG nova.scheduler.client.report [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1481.647812] env[61978]: DEBUG nova.compute.manager [req-94d302f6-b3ed-44a6-b497-59daf36fc774 req-8aa26ccb-7651-4361-b01f-6b107bf1f043 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Received event network-changed-6c4911ae-3f5e-46c7-9538-7e1a9811252e {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1481.648030] env[61978]: DEBUG nova.compute.manager [req-94d302f6-b3ed-44a6-b497-59daf36fc774 req-8aa26ccb-7651-4361-b01f-6b107bf1f043 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Refreshing instance network info cache due to event network-changed-6c4911ae-3f5e-46c7-9538-7e1a9811252e. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1481.648334] env[61978]: DEBUG oslo_concurrency.lockutils [req-94d302f6-b3ed-44a6-b497-59daf36fc774 req-8aa26ccb-7651-4361-b01f-6b107bf1f043 service nova] Acquiring lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1481.648500] env[61978]: DEBUG oslo_concurrency.lockutils [req-94d302f6-b3ed-44a6-b497-59daf36fc774 req-8aa26ccb-7651-4361-b01f-6b107bf1f043 service nova] Acquired lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1481.648675] env[61978]: DEBUG nova.network.neutron [req-94d302f6-b3ed-44a6-b497-59daf36fc774 req-8aa26ccb-7651-4361-b01f-6b107bf1f043 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Refreshing network info cache for port 6c4911ae-3f5e-46c7-9538-7e1a9811252e {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1482.151594] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.139s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.152152] env[61978]: DEBUG nova.compute.manager [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1482.157112] env[61978]: DEBUG oslo_concurrency.lockutils [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.330s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1482.157386] env[61978]: DEBUG nova.objects.instance [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lazy-loading 'resources' on Instance uuid 6a454083-8d85-4a29-98dc-29eb0a072560 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1482.169915] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "6a454083-8d85-4a29-98dc-29eb0a072560" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1482.388977] env[61978]: DEBUG nova.network.neutron [req-94d302f6-b3ed-44a6-b497-59daf36fc774 req-8aa26ccb-7651-4361-b01f-6b107bf1f043 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Updated VIF entry in instance network info cache for port 6c4911ae-3f5e-46c7-9538-7e1a9811252e. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1482.389432] env[61978]: DEBUG nova.network.neutron [req-94d302f6-b3ed-44a6-b497-59daf36fc774 req-8aa26ccb-7651-4361-b01f-6b107bf1f043 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Updating instance_info_cache with network_info: [{"id": "6c4911ae-3f5e-46c7-9538-7e1a9811252e", "address": "fa:16:3e:c3:c6:fc", "network": {"id": "1200e62b-3b3a-40e1-98a2-c33a17573a38", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-2039134840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8f40d19e7c74ade886c322a78583545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap6c4911ae-3f", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1482.658854] env[61978]: DEBUG nova.compute.utils [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1482.660556] env[61978]: DEBUG nova.compute.manager [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1482.660729] env[61978]: DEBUG nova.network.neutron [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1482.662720] env[61978]: DEBUG nova.objects.instance [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lazy-loading 'numa_topology' on Instance uuid 6a454083-8d85-4a29-98dc-29eb0a072560 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1482.699334] env[61978]: DEBUG nova.policy [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd72a836e3aef4b59b1092b91f33fd929', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2b289cdad1fe4ad38c5d987680be2367', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1482.892053] env[61978]: DEBUG oslo_concurrency.lockutils [req-94d302f6-b3ed-44a6-b497-59daf36fc774 req-8aa26ccb-7651-4361-b01f-6b107bf1f043 service nova] Releasing lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1482.952701] env[61978]: DEBUG nova.network.neutron [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Successfully created port: d8c08c29-7a03-4668-9677-a2fd2c6b39eb {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1483.165441] env[61978]: DEBUG nova.compute.manager [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1483.168503] env[61978]: DEBUG nova.objects.base [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Object Instance<6a454083-8d85-4a29-98dc-29eb0a072560> lazy-loaded attributes: resources,numa_topology {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1483.246835] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58ff8d2-dde8-4cbf-a7dc-dc86dd91fdd7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.254583] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74caa8b1-73b9-454d-85c2-af6a93d32335 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.284209] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30fb020a-d204-4825-a5e9-fb18d2c879e5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.291224] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da77efc-dcd0-4673-afca-163bb8a4b2cc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.304220] env[61978]: DEBUG nova.compute.provider_tree [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1483.807490] env[61978]: DEBUG nova.scheduler.client.report [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1484.179150] env[61978]: DEBUG nova.compute.manager [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1484.205461] env[61978]: DEBUG nova.virt.hardware [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1484.205730] env[61978]: DEBUG nova.virt.hardware [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1484.205894] env[61978]: DEBUG nova.virt.hardware [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1484.206095] env[61978]: DEBUG nova.virt.hardware [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1484.206258] env[61978]: DEBUG nova.virt.hardware [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1484.206495] env[61978]: DEBUG nova.virt.hardware [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1484.206617] env[61978]: DEBUG nova.virt.hardware [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1484.206778] env[61978]: DEBUG nova.virt.hardware [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1484.206947] 
env[61978]: DEBUG nova.virt.hardware [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1484.207128] env[61978]: DEBUG nova.virt.hardware [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1484.207311] env[61978]: DEBUG nova.virt.hardware [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1484.208218] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4035582f-e118-421f-8b6d-f614955dd1cb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.216496] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84d693c-8a99-4f0b-bfaa-3a1c957dc48e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.312135] env[61978]: DEBUG oslo_concurrency.lockutils [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.155s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1484.316015] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.351s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1484.318193] env[61978]: INFO nova.compute.claims [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1484.322662] env[61978]: DEBUG nova.compute.manager [req-4ba562de-c400-4d70-977c-389daf40cf63 req-898662b0-690e-46ae-8c9a-efe865cbac23 service nova] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Received event network-vif-plugged-d8c08c29-7a03-4668-9677-a2fd2c6b39eb {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1484.322968] env[61978]: DEBUG oslo_concurrency.lockutils [req-4ba562de-c400-4d70-977c-389daf40cf63 req-898662b0-690e-46ae-8c9a-efe865cbac23 service nova] Acquiring lock "77a8cde0-b046-4970-9979-9d4b85a224e2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1484.323317] env[61978]: DEBUG oslo_concurrency.lockutils 
[req-4ba562de-c400-4d70-977c-389daf40cf63 req-898662b0-690e-46ae-8c9a-efe865cbac23 service nova] Lock "77a8cde0-b046-4970-9979-9d4b85a224e2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1484.323608] env[61978]: DEBUG oslo_concurrency.lockutils [req-4ba562de-c400-4d70-977c-389daf40cf63 req-898662b0-690e-46ae-8c9a-efe865cbac23 service nova] Lock "77a8cde0-b046-4970-9979-9d4b85a224e2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1484.323888] env[61978]: DEBUG nova.compute.manager [req-4ba562de-c400-4d70-977c-389daf40cf63 req-898662b0-690e-46ae-8c9a-efe865cbac23 service nova] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] No waiting events found dispatching network-vif-plugged-d8c08c29-7a03-4668-9677-a2fd2c6b39eb {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1484.324183] env[61978]: WARNING nova.compute.manager [req-4ba562de-c400-4d70-977c-389daf40cf63 req-898662b0-690e-46ae-8c9a-efe865cbac23 service nova] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Received unexpected event network-vif-plugged-d8c08c29-7a03-4668-9677-a2fd2c6b39eb for instance with vm_state building and task_state spawning. [ 1484.409227] env[61978]: DEBUG nova.network.neutron [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Successfully updated port: d8c08c29-7a03-4668-9677-a2fd2c6b39eb {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1484.832158] env[61978]: DEBUG oslo_concurrency.lockutils [None req-17b5d0bd-8260-493f-bea7-8ecaaf7ec32b tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "6a454083-8d85-4a29-98dc-29eb0a072560" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 21.809s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1484.833020] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "6a454083-8d85-4a29-98dc-29eb0a072560" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 2.663s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1484.833255] env[61978]: INFO nova.compute.manager [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Unshelving [ 1484.911261] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "refresh_cache-77a8cde0-b046-4970-9979-9d4b85a224e2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1484.911767] env[61978]: DEBUG oslo_concurrency.lockutils [None 
req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquired lock "refresh_cache-77a8cde0-b046-4970-9979-9d4b85a224e2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1484.911767] env[61978]: DEBUG nova.network.neutron [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1485.421731] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f02b761-c027-4471-8eb0-5450027bf1fd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.429807] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b36aad-3fb0-44d0-8aae-457496772354 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.462210] env[61978]: DEBUG nova.network.neutron [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1485.464596] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7835946c-6d37-4562-a073-f2511c412088 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.473337] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6873f1e4-c05b-439c-b795-e3941e116fa9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.487263] env[61978]: DEBUG nova.compute.provider_tree [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1485.826108] env[61978]: DEBUG nova.network.neutron [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Updating instance_info_cache with network_info: [{"id": "d8c08c29-7a03-4668-9677-a2fd2c6b39eb", "address": "fa:16:3e:6b:21:d2", "network": {"id": "b65f6c14-2588-4d96-a954-d45b7ede9d35", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-701006356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b289cdad1fe4ad38c5d987680be2367", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8c08c29-7a", "ovs_interfaceid": "d8c08c29-7a03-4668-9677-a2fd2c6b39eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1485.844526] env[61978]: DEBUG nova.compute.utils [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1485.992957] env[61978]: DEBUG nova.scheduler.client.report [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1486.329055] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Releasing lock "refresh_cache-77a8cde0-b046-4970-9979-9d4b85a224e2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1486.329209] env[61978]: DEBUG nova.compute.manager [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Instance network_info: |[{"id": "d8c08c29-7a03-4668-9677-a2fd2c6b39eb", "address": "fa:16:3e:6b:21:d2", "network": {"id": "b65f6c14-2588-4d96-a954-d45b7ede9d35", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-701006356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b289cdad1fe4ad38c5d987680be2367", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8c08c29-7a", "ovs_interfaceid": "d8c08c29-7a03-4668-9677-a2fd2c6b39eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 
1486.329650] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:21:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '67921bdb-a7a0-46b5-ba05-ca997496e222', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd8c08c29-7a03-4668-9677-a2fd2c6b39eb', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1486.337499] env[61978]: DEBUG oslo.service.loopingcall [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1486.337728] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1486.337958] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-04fabba9-ef74-490d-804e-40788ca04ce0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.355566] env[61978]: INFO nova.virt.block_device [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Booting with volume 92c75742-2007-44d7-9c7e-705254285c91 at /dev/sdb [ 1486.357923] env[61978]: DEBUG nova.compute.manager [req-0694fc2a-6bf7-406c-9276-f15bc4ebfc30 req-aff27404-8ee0-4ef7-98a6-1cb2c5075c58 service nova] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Received event network-changed-d8c08c29-7a03-4668-9677-a2fd2c6b39eb {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1486.358160] env[61978]: DEBUG nova.compute.manager [req-0694fc2a-6bf7-406c-9276-f15bc4ebfc30 req-aff27404-8ee0-4ef7-98a6-1cb2c5075c58 service nova] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Refreshing instance network info cache due to event network-changed-d8c08c29-7a03-4668-9677-a2fd2c6b39eb. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1486.358382] env[61978]: DEBUG oslo_concurrency.lockutils [req-0694fc2a-6bf7-406c-9276-f15bc4ebfc30 req-aff27404-8ee0-4ef7-98a6-1cb2c5075c58 service nova] Acquiring lock "refresh_cache-77a8cde0-b046-4970-9979-9d4b85a224e2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1486.358535] env[61978]: DEBUG oslo_concurrency.lockutils [req-0694fc2a-6bf7-406c-9276-f15bc4ebfc30 req-aff27404-8ee0-4ef7-98a6-1cb2c5075c58 service nova] Acquired lock "refresh_cache-77a8cde0-b046-4970-9979-9d4b85a224e2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1486.358701] env[61978]: DEBUG nova.network.neutron [req-0694fc2a-6bf7-406c-9276-f15bc4ebfc30 req-aff27404-8ee0-4ef7-98a6-1cb2c5075c58 service nova] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Refreshing network info cache for port d8c08c29-7a03-4668-9677-a2fd2c6b39eb {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1486.367815] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1486.367815] env[61978]: value = "task-1396160" [ 1486.367815] env[61978]: _type = "Task" [ 1486.367815] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.376033] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396160, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.393607] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-33f17af6-76e5-4679-a038-35bc3f424ed2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.402146] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f83d9d22-e2cd-4b92-aa94-c641491c1e5d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.426649] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0db39118-2283-47d5-b8a7-f6125a91ad5e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.434371] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e222ffea-4b5c-408b-8e2a-994da0b317a2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.459980] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5b16ef-2803-4e12-baf0-c6269ad1399f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.466048] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6b979f4-db04-4660-8559-cecd34f29454 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.480082] env[61978]: DEBUG nova.virt.block_device [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Updating existing volume attachment 
record: ba47d155-30d2-4d6e-8013-54e14c4c0217 {{(pid=61978) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1486.500119] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.184s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1486.500655] env[61978]: DEBUG nova.compute.manager [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1486.878043] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396160, 'name': CreateVM_Task, 'duration_secs': 0.293747} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.878043] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1486.878464] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1486.878640] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1486.878964] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1486.879261] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3b6102b-ea8a-4a9b-bf12-b9d955f60352 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.883442] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1486.883442] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d43caa-657c-5427-c351-bd317aee1950" [ 1486.883442] env[61978]: _type = "Task" [ 1486.883442] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.890725] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d43caa-657c-5427-c351-bd317aee1950, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.008024] env[61978]: DEBUG nova.compute.utils [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1487.008024] env[61978]: DEBUG nova.compute.manager [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1487.008024] env[61978]: DEBUG nova.network.neutron [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1487.070782] env[61978]: DEBUG nova.policy [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df950ca91cd64479950545608f749fb6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aaa4a0cb1a4c45949b43032fd9395200', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1487.092593] env[61978]: DEBUG nova.network.neutron [req-0694fc2a-6bf7-406c-9276-f15bc4ebfc30 req-aff27404-8ee0-4ef7-98a6-1cb2c5075c58 service nova] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Updated VIF entry in instance network info cache for port d8c08c29-7a03-4668-9677-a2fd2c6b39eb. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1487.092934] env[61978]: DEBUG nova.network.neutron [req-0694fc2a-6bf7-406c-9276-f15bc4ebfc30 req-aff27404-8ee0-4ef7-98a6-1cb2c5075c58 service nova] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Updating instance_info_cache with network_info: [{"id": "d8c08c29-7a03-4668-9677-a2fd2c6b39eb", "address": "fa:16:3e:6b:21:d2", "network": {"id": "b65f6c14-2588-4d96-a954-d45b7ede9d35", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-701006356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b289cdad1fe4ad38c5d987680be2367", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8c08c29-7a", "ovs_interfaceid": "d8c08c29-7a03-4668-9677-a2fd2c6b39eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1487.324981] env[61978]: DEBUG nova.network.neutron [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Successfully created port: dbce7690-5815-4441-be23-3ad6e4b0b885 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1487.394478] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d43caa-657c-5427-c351-bd317aee1950, 'name': SearchDatastore_Task, 'duration_secs': 0.009222} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.394736] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.394972] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1487.395287] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1487.395455] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1487.395642] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1487.395903] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6bfc3ffd-dc5a-4a1d-aa38-39dd1d4b8d6e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.403780] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1487.404014] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1487.404702] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9c5cc41-bf91-4216-9f12-9cc76334bb5f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.409882] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1487.409882] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52825fd4-bfb1-a36e-6ac1-7144af3b66d8" [ 1487.409882] env[61978]: _type = "Task" [ 1487.409882] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.417112] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52825fd4-bfb1-a36e-6ac1-7144af3b66d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.511043] env[61978]: DEBUG nova.compute.manager [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1487.595600] env[61978]: DEBUG oslo_concurrency.lockutils [req-0694fc2a-6bf7-406c-9276-f15bc4ebfc30 req-aff27404-8ee0-4ef7-98a6-1cb2c5075c58 service nova] Releasing lock "refresh_cache-77a8cde0-b046-4970-9979-9d4b85a224e2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.921409] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52825fd4-bfb1-a36e-6ac1-7144af3b66d8, 'name': SearchDatastore_Task, 'duration_secs': 0.010774} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.922268] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-085f0194-ed5e-439f-8291-5053e572b309 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.927358] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1487.927358] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52085d29-e019-877e-3ecd-162fb1e11fd9" [ 1487.927358] env[61978]: _type = "Task" [ 1487.927358] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.935050] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52085d29-e019-877e-3ecd-162fb1e11fd9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.437452] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52085d29-e019-877e-3ecd-162fb1e11fd9, 'name': SearchDatastore_Task, 'duration_secs': 0.009359} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.437731] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.437999] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 77a8cde0-b046-4970-9979-9d4b85a224e2/77a8cde0-b046-4970-9979-9d4b85a224e2.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1488.438302] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9f0054e2-789f-4223-b5a0-1278d54342ba {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.446590] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1488.446590] env[61978]: value = "task-1396164" [ 1488.446590] env[61978]: _type = "Task" [ 1488.446590] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.453747] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396164, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.521425] env[61978]: DEBUG nova.compute.manager [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1488.548712] env[61978]: DEBUG nova.virt.hardware [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1488.548969] env[61978]: DEBUG nova.virt.hardware [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1488.549153] env[61978]: DEBUG nova.virt.hardware [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1488.549356] env[61978]: DEBUG nova.virt.hardware [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1488.549510] env[61978]: DEBUG nova.virt.hardware [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1488.549662] env[61978]: DEBUG nova.virt.hardware [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1488.549874] env[61978]: DEBUG nova.virt.hardware [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1488.550085] env[61978]: DEBUG nova.virt.hardware [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1488.550273] env[61978]: DEBUG nova.virt.hardware [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 
tempest-ServersTestJSON-1559277588-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1488.550442] env[61978]: DEBUG nova.virt.hardware [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1488.550617] env[61978]: DEBUG nova.virt.hardware [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1488.551484] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b65c139-ff25-4036-a2bb-24a058ec81ef {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.559134] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f408d48-500a-41d5-9ca0-5829287ca236 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.721457] env[61978]: DEBUG nova.compute.manager [req-87d1c958-91b9-40fc-9d1c-db1a0e625968 req-11a8717a-2215-45a8-9c2d-46c1bdcb34d7 service nova] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Received event network-vif-plugged-dbce7690-5815-4441-be23-3ad6e4b0b885 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1488.721651] env[61978]: DEBUG oslo_concurrency.lockutils [req-87d1c958-91b9-40fc-9d1c-db1a0e625968 req-11a8717a-2215-45a8-9c2d-46c1bdcb34d7 service nova] Acquiring lock "116a3384-8bf3-49c6-9ee0-01d2781c69d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.721882] env[61978]: DEBUG oslo_concurrency.lockutils [req-87d1c958-91b9-40fc-9d1c-db1a0e625968 req-11a8717a-2215-45a8-9c2d-46c1bdcb34d7 service nova] Lock "116a3384-8bf3-49c6-9ee0-01d2781c69d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.722105] env[61978]: DEBUG oslo_concurrency.lockutils [req-87d1c958-91b9-40fc-9d1c-db1a0e625968 req-11a8717a-2215-45a8-9c2d-46c1bdcb34d7 service nova] Lock "116a3384-8bf3-49c6-9ee0-01d2781c69d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1488.722311] env[61978]: DEBUG nova.compute.manager [req-87d1c958-91b9-40fc-9d1c-db1a0e625968 req-11a8717a-2215-45a8-9c2d-46c1bdcb34d7 service nova] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] No waiting events found dispatching network-vif-plugged-dbce7690-5815-4441-be23-3ad6e4b0b885 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1488.722483] env[61978]: WARNING nova.compute.manager [req-87d1c958-91b9-40fc-9d1c-db1a0e625968 req-11a8717a-2215-45a8-9c2d-46c1bdcb34d7 service nova] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] 
Received unexpected event network-vif-plugged-dbce7690-5815-4441-be23-3ad6e4b0b885 for instance with vm_state building and task_state spawning. [ 1488.816684] env[61978]: DEBUG nova.network.neutron [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Successfully updated port: dbce7690-5815-4441-be23-3ad6e4b0b885 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1488.957262] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396164, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464257} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.957621] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 77a8cde0-b046-4970-9979-9d4b85a224e2/77a8cde0-b046-4970-9979-9d4b85a224e2.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1488.957757] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1488.958029] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-28837dd5-becc-4a18-b1ab-1deaed6b53fe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.964724] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1488.964724] env[61978]: value = "task-1396166" [ 1488.964724] env[61978]: _type = "Task" [ 1488.964724] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.972179] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396166, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.320188] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "refresh_cache-116a3384-8bf3-49c6-9ee0-01d2781c69d5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1489.320369] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired lock "refresh_cache-116a3384-8bf3-49c6-9ee0-01d2781c69d5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1489.320531] env[61978]: DEBUG nova.network.neutron [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1489.474242] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396166, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07555} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.474510] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1489.475267] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7578cca-1f40-46a0-9197-03d16af0f8a7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.497748] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] 77a8cde0-b046-4970-9979-9d4b85a224e2/77a8cde0-b046-4970-9979-9d4b85a224e2.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1489.498036] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41d6d841-a63a-4d5c-bee7-20d8ffb0256d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.516750] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1489.516750] env[61978]: value = "task-1396167" [ 1489.516750] env[61978]: _type = "Task" [ 1489.516750] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.524094] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396167, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.855015] env[61978]: DEBUG nova.network.neutron [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1489.976927] env[61978]: DEBUG nova.network.neutron [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Updating instance_info_cache with network_info: [{"id": "dbce7690-5815-4441-be23-3ad6e4b0b885", "address": "fa:16:3e:8f:54:b6", "network": {"id": "a08d803f-f140-462d-a335-21fe5ce33ff5", "bridge": "br-int", "label": "tempest-ServersTestJSON-1636313508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaa4a0cb1a4c45949b43032fd9395200", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbce7690-58", "ovs_interfaceid": "dbce7690-5815-4441-be23-3ad6e4b0b885", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1490.027016] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396167, 'name': ReconfigVM_Task, 'duration_secs': 0.319324} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.027786] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Reconfigured VM instance instance-00000075 to attach disk [datastore2] 77a8cde0-b046-4970-9979-9d4b85a224e2/77a8cde0-b046-4970-9979-9d4b85a224e2.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1490.027943] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-363d1555-ca78-42d2-b5c8-e87cadca6727 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.034442] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1490.034442] env[61978]: value = "task-1396168" [ 1490.034442] env[61978]: _type = "Task" [ 1490.034442] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.043637] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396168, 'name': Rename_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.479590] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Releasing lock "refresh_cache-116a3384-8bf3-49c6-9ee0-01d2781c69d5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1490.479874] env[61978]: DEBUG nova.compute.manager [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Instance network_info: |[{"id": "dbce7690-5815-4441-be23-3ad6e4b0b885", "address": "fa:16:3e:8f:54:b6", "network": {"id": "a08d803f-f140-462d-a335-21fe5ce33ff5", "bridge": "br-int", "label": "tempest-ServersTestJSON-1636313508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaa4a0cb1a4c45949b43032fd9395200", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbce7690-58", "ovs_interfaceid": "dbce7690-5815-4441-be23-3ad6e4b0b885", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1490.480413] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:54:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb68953b-dee5-4d9d-b47b-277336ba76dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dbce7690-5815-4441-be23-3ad6e4b0b885', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1490.488518] env[61978]: DEBUG oslo.service.loopingcall [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1490.488764] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1490.489049] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-11511c43-ca83-404c-b267-799395b3ec2c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.509748] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1490.509748] env[61978]: value = "task-1396169" [ 1490.509748] env[61978]: _type = "Task" [ 1490.509748] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.517212] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396169, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.543251] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396168, 'name': Rename_Task, 'duration_secs': 0.130372} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.543530] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1490.543792] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2888c121-380f-411e-8fee-ae601f125c06 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.548855] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1490.548855] env[61978]: value = "task-1396170" [ 1490.548855] env[61978]: _type = "Task" [ 1490.548855] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.556322] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396170, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.749212] env[61978]: DEBUG nova.compute.manager [req-f5af274f-0476-4a39-b1a1-0a03643465ac req-b0f7bc40-774c-4a04-bf82-4a52981b23d4 service nova] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Received event network-changed-dbce7690-5815-4441-be23-3ad6e4b0b885 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1490.749456] env[61978]: DEBUG nova.compute.manager [req-f5af274f-0476-4a39-b1a1-0a03643465ac req-b0f7bc40-774c-4a04-bf82-4a52981b23d4 service nova] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Refreshing instance network info cache due to event network-changed-dbce7690-5815-4441-be23-3ad6e4b0b885. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1490.749778] env[61978]: DEBUG oslo_concurrency.lockutils [req-f5af274f-0476-4a39-b1a1-0a03643465ac req-b0f7bc40-774c-4a04-bf82-4a52981b23d4 service nova] Acquiring lock "refresh_cache-116a3384-8bf3-49c6-9ee0-01d2781c69d5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.750202] env[61978]: DEBUG oslo_concurrency.lockutils [req-f5af274f-0476-4a39-b1a1-0a03643465ac req-b0f7bc40-774c-4a04-bf82-4a52981b23d4 service nova] Acquired lock "refresh_cache-116a3384-8bf3-49c6-9ee0-01d2781c69d5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.750202] env[61978]: DEBUG nova.network.neutron [req-f5af274f-0476-4a39-b1a1-0a03643465ac req-b0f7bc40-774c-4a04-bf82-4a52981b23d4 service nova] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Refreshing network info cache for port dbce7690-5815-4441-be23-3ad6e4b0b885 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1491.020278] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396169, 'name': CreateVM_Task, 'duration_secs': 0.353273} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.020616] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1491.021185] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1491.021371] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1491.021696] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1491.021955] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acdfe458-90a4-4188-bd3b-dbcbab20d258 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.027436] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1491.027436] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]525bf4c8-5688-cc03-e8d2-5e96472636a9" [ 1491.027436] env[61978]: _type = "Task" [ 1491.027436] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.035308] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]525bf4c8-5688-cc03-e8d2-5e96472636a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.058744] env[61978]: DEBUG oslo_vmware.api [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396170, 'name': PowerOnVM_Task, 'duration_secs': 0.49607} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.059064] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1491.059194] env[61978]: INFO nova.compute.manager [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Took 6.88 seconds to spawn the instance on the hypervisor. [ 1491.059410] env[61978]: DEBUG nova.compute.manager [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1491.060619] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c9ab2f-d560-43eb-859b-2e024de22c88 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.438685] env[61978]: DEBUG nova.network.neutron [req-f5af274f-0476-4a39-b1a1-0a03643465ac req-b0f7bc40-774c-4a04-bf82-4a52981b23d4 service nova] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Updated VIF entry in instance network info cache for port dbce7690-5815-4441-be23-3ad6e4b0b885. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1491.439082] env[61978]: DEBUG nova.network.neutron [req-f5af274f-0476-4a39-b1a1-0a03643465ac req-b0f7bc40-774c-4a04-bf82-4a52981b23d4 service nova] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Updating instance_info_cache with network_info: [{"id": "dbce7690-5815-4441-be23-3ad6e4b0b885", "address": "fa:16:3e:8f:54:b6", "network": {"id": "a08d803f-f140-462d-a335-21fe5ce33ff5", "bridge": "br-int", "label": "tempest-ServersTestJSON-1636313508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaa4a0cb1a4c45949b43032fd9395200", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbce7690-58", "ovs_interfaceid": "dbce7690-5815-4441-be23-3ad6e4b0b885", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1491.545562] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': 
session[52499f00-04f1-7b6f-00fd-7545db7737b2]525bf4c8-5688-cc03-e8d2-5e96472636a9, 'name': SearchDatastore_Task, 'duration_secs': 0.01361} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.546090] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1491.546496] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1491.546899] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1491.547213] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1491.547553] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1491.547929] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42db7d38-fe1d-482b-9cbf-03a84a9a7dd3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.556888] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1491.557188] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1491.560457] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31754fc6-0ce7-4c2c-a2ec-afcbd5c5a680 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.566595] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1491.566595] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f9b37e-a924-d647-88b0-53651db83f3a" [ 1491.566595] env[61978]: _type = "Task" [ 1491.566595] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.581343] env[61978]: INFO nova.compute.manager [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Took 11.58 seconds to build instance. [ 1491.582325] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f9b37e-a924-d647-88b0-53651db83f3a, 'name': SearchDatastore_Task, 'duration_secs': 0.010657} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.583384] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7660e0ae-1367-450b-a6a7-196eae0ea11c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.588536] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1491.588536] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a2f211-e613-a4f2-2286-a28d16ae92e1" [ 1491.588536] env[61978]: _type = "Task" [ 1491.588536] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.597325] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a2f211-e613-a4f2-2286-a28d16ae92e1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.941934] env[61978]: DEBUG oslo_concurrency.lockutils [req-f5af274f-0476-4a39-b1a1-0a03643465ac req-b0f7bc40-774c-4a04-bf82-4a52981b23d4 service nova] Releasing lock "refresh_cache-116a3384-8bf3-49c6-9ee0-01d2781c69d5" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.012438] env[61978]: DEBUG nova.compute.manager [req-e4aef9e4-5f77-4b48-8583-5e19dfc896d8 req-bd5bad3d-91a8-41be-9cac-0eea9ad9ff91 service nova] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Received event network-changed-d8c08c29-7a03-4668-9677-a2fd2c6b39eb {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1492.012674] env[61978]: DEBUG nova.compute.manager [req-e4aef9e4-5f77-4b48-8583-5e19dfc896d8 req-bd5bad3d-91a8-41be-9cac-0eea9ad9ff91 service nova] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Refreshing instance network info cache due to event network-changed-d8c08c29-7a03-4668-9677-a2fd2c6b39eb. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1492.012894] env[61978]: DEBUG oslo_concurrency.lockutils [req-e4aef9e4-5f77-4b48-8583-5e19dfc896d8 req-bd5bad3d-91a8-41be-9cac-0eea9ad9ff91 service nova] Acquiring lock "refresh_cache-77a8cde0-b046-4970-9979-9d4b85a224e2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1492.013071] env[61978]: DEBUG oslo_concurrency.lockutils [req-e4aef9e4-5f77-4b48-8583-5e19dfc896d8 req-bd5bad3d-91a8-41be-9cac-0eea9ad9ff91 service nova] Acquired lock "refresh_cache-77a8cde0-b046-4970-9979-9d4b85a224e2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.013250] env[61978]: DEBUG nova.network.neutron [req-e4aef9e4-5f77-4b48-8583-5e19dfc896d8 req-bd5bad3d-91a8-41be-9cac-0eea9ad9ff91 service nova] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Refreshing network info cache for port d8c08c29-7a03-4668-9677-a2fd2c6b39eb {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1492.077598] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.077880] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.078093] env[61978]: DEBUG nova.objects.instance [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lazy-loading 'pci_requests' on Instance uuid 6a454083-8d85-4a29-98dc-29eb0a072560 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1492.083252] env[61978]: DEBUG oslo_concurrency.lockutils [None req-0d39dfeb-6c29-4e93-ad51-9765de1635c8 
tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "77a8cde0-b046-4970-9979-9d4b85a224e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.095s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.102255] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a2f211-e613-a4f2-2286-a28d16ae92e1, 'name': SearchDatastore_Task, 'duration_secs': 0.014541} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.102572] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.102843] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 116a3384-8bf3-49c6-9ee0-01d2781c69d5/116a3384-8bf3-49c6-9ee0-01d2781c69d5.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1492.103169] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d88d74e-0ff4-471a-a34f-a4fe07adc9c4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.116295] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1492.116295] env[61978]: value = "task-1396171" [ 1492.116295] env[61978]: _type = "Task" [ 1492.116295] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.136106] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396171, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.582665] env[61978]: DEBUG nova.objects.instance [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lazy-loading 'numa_topology' on Instance uuid 6a454083-8d85-4a29-98dc-29eb0a072560 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1492.627882] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396171, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.748823] env[61978]: DEBUG nova.network.neutron [req-e4aef9e4-5f77-4b48-8583-5e19dfc896d8 req-bd5bad3d-91a8-41be-9cac-0eea9ad9ff91 service nova] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Updated VIF entry in instance network info cache for port d8c08c29-7a03-4668-9677-a2fd2c6b39eb. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1492.749315] env[61978]: DEBUG nova.network.neutron [req-e4aef9e4-5f77-4b48-8583-5e19dfc896d8 req-bd5bad3d-91a8-41be-9cac-0eea9ad9ff91 service nova] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Updating instance_info_cache with network_info: [{"id": "d8c08c29-7a03-4668-9677-a2fd2c6b39eb", "address": "fa:16:3e:6b:21:d2", "network": {"id": "b65f6c14-2588-4d96-a954-d45b7ede9d35", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-701006356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.159", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b289cdad1fe4ad38c5d987680be2367", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8c08c29-7a", "ovs_interfaceid": "d8c08c29-7a03-4668-9677-a2fd2c6b39eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.085619] env[61978]: INFO nova.compute.claims [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1493.129360] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396171, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.252240] env[61978]: DEBUG oslo_concurrency.lockutils [req-e4aef9e4-5f77-4b48-8583-5e19dfc896d8 req-bd5bad3d-91a8-41be-9cac-0eea9ad9ff91 service nova] Releasing lock "refresh_cache-77a8cde0-b046-4970-9979-9d4b85a224e2" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.628610] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396171, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.130533] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396171, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.560645} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.130850] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 116a3384-8bf3-49c6-9ee0-01d2781c69d5/116a3384-8bf3-49c6-9ee0-01d2781c69d5.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1494.131057] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1494.131331] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ff12d369-3092-467a-b253-0f739ce56203 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.138091] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1494.138091] env[61978]: value = "task-1396172" [ 1494.138091] env[61978]: _type = "Task" [ 1494.138091] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.145211] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396172, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.168038] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aecd1aa6-ada1-48ba-9a42-e4f2314b348f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.176311] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b097af5e-dff4-46ac-a824-3317eafe3a12 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.205456] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700629d0-b403-48d5-940c-1fd339770342 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.213057] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bba270d-ea39-4704-92b8-6e8384dc8720 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.226129] env[61978]: DEBUG nova.compute.provider_tree [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1494.647510] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396172, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086942} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.647778] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1494.648570] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55db4292-9d7a-41a4-82b3-33dbcefc7237 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.669536] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] 116a3384-8bf3-49c6-9ee0-01d2781c69d5/116a3384-8bf3-49c6-9ee0-01d2781c69d5.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1494.669782] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6945c183-ab29-4c20-b29a-e3b66dfa3465 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.688371] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1494.688371] env[61978]: value = "task-1396173" [ 1494.688371] env[61978]: _type = "Task" [ 1494.688371] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.695521] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396173, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.730017] env[61978]: DEBUG nova.scheduler.client.report [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1495.199285] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396173, 'name': ReconfigVM_Task, 'duration_secs': 0.378111} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.199579] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Reconfigured VM instance instance-00000076 to attach disk [datastore2] 116a3384-8bf3-49c6-9ee0-01d2781c69d5/116a3384-8bf3-49c6-9ee0-01d2781c69d5.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1495.200178] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c240e784-2dbb-4495-afd3-5c20cab06494 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.206300] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1495.206300] env[61978]: value = "task-1396174" [ 1495.206300] env[61978]: _type = "Task" [ 1495.206300] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.213471] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396174, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.235424] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.157s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.264339] env[61978]: INFO nova.network.neutron [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Updating port 6c4911ae-3f5e-46c7-9538-7e1a9811252e with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1495.716265] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396174, 'name': Rename_Task, 'duration_secs': 0.213431} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.716559] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1495.716800] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-97612ef2-7aae-408a-8037-f63851e94b68 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.723778] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1495.723778] env[61978]: value = "task-1396175" [ 1495.723778] env[61978]: _type = "Task" [ 1495.723778] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.730914] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396175, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.233167] env[61978]: DEBUG oslo_vmware.api [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396175, 'name': PowerOnVM_Task, 'duration_secs': 0.442875} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.233654] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1496.233654] env[61978]: INFO nova.compute.manager [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Took 7.71 seconds to spawn the instance on the hypervisor. 
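[Editor's note] The Rename_Task and PowerOnVM_Task records above follow the same poll-until-done pattern as every other vCenter task in this trace: the API layer submits the task, then repeatedly reads its state, logging "progress is N%" until it reaches success or error. Below is a minimal illustrative sketch of that pattern; it is not oslo.vmware's actual wait_for_task/_poll_task code, and the get_task_info callable and poll_interval parameter are assumed names used only for the example.

    import time

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        """Poll a vCenter task until it finishes, mirroring the
        'progress is N%' / 'completed successfully' records above.

        get_task_info is an assumed callable returning an object with
        .state ('running', 'success', 'error'), .progress and .error.
        """
        while True:
            info = get_task_info(task_ref)        # e.g. read via PropertyCollector
            if info.state == "success":
                print(f"Task: {task_ref} completed successfully.")
                return info
            if info.state == "error":
                raise RuntimeError(f"Task {task_ref} failed: {info.error}")
            print(f"Task: {task_ref} progress is {info.progress or 0}%.")
            time.sleep(poll_interval)             # back off before the next poll
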
[ 1496.233804] env[61978]: DEBUG nova.compute.manager [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1496.234572] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f9283aa-b9ad-4ad1-8b01-dbbde610ceff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.649025] env[61978]: DEBUG nova.compute.manager [req-855f4e11-2907-406b-ae50-40efb7e3393c req-2b57863e-1a39-47f8-8f6d-34aa7dc1876c service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Received event network-vif-plugged-6c4911ae-3f5e-46c7-9538-7e1a9811252e {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1496.649352] env[61978]: DEBUG oslo_concurrency.lockutils [req-855f4e11-2907-406b-ae50-40efb7e3393c req-2b57863e-1a39-47f8-8f6d-34aa7dc1876c service nova] Acquiring lock "6a454083-8d85-4a29-98dc-29eb0a072560-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1496.649519] env[61978]: DEBUG oslo_concurrency.lockutils [req-855f4e11-2907-406b-ae50-40efb7e3393c req-2b57863e-1a39-47f8-8f6d-34aa7dc1876c service nova] Lock "6a454083-8d85-4a29-98dc-29eb0a072560-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1496.649699] env[61978]: DEBUG oslo_concurrency.lockutils [req-855f4e11-2907-406b-ae50-40efb7e3393c req-2b57863e-1a39-47f8-8f6d-34aa7dc1876c service nova] Lock "6a454083-8d85-4a29-98dc-29eb0a072560-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1496.649872] env[61978]: DEBUG nova.compute.manager [req-855f4e11-2907-406b-ae50-40efb7e3393c req-2b57863e-1a39-47f8-8f6d-34aa7dc1876c service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] No waiting events found dispatching network-vif-plugged-6c4911ae-3f5e-46c7-9538-7e1a9811252e {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1496.650054] env[61978]: WARNING nova.compute.manager [req-855f4e11-2907-406b-ae50-40efb7e3393c req-2b57863e-1a39-47f8-8f6d-34aa7dc1876c service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Received unexpected event network-vif-plugged-6c4911ae-3f5e-46c7-9538-7e1a9811252e for instance with vm_state shelved_offloaded and task_state spawning. 
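[Editor's note] The pop_instance_event / "No waiting events found" / "Received unexpected event" sequence above reflects a waiter-registry pattern: code paths that expect a Neutron notification register interest keyed by (instance, event name), and the external-event handler either signals the matching waiter or, as in this case, warns that nothing was waiting because the instance was still spawning from a shelved state. A toy sketch of such a registry follows, with assumed names; it is not Nova's InstanceEvents implementation.

    import threading
    from collections import defaultdict

    class EventRegistry:
        """Toy equivalent of the waiter/dispatch pattern seen in the log."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = defaultdict(dict)   # instance_uuid -> {event: Event}

        def prepare(self, instance_uuid, event_name):
            """Register interest before triggering the external operation."""
            waiter = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = waiter
            return waiter

        def dispatch(self, instance_uuid, event_name):
            """Handle an external event such as network-vif-plugged."""
            with self._lock:
                waiter = self._waiters.get(instance_uuid, {}).pop(event_name, None)
            if waiter is None:
                print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
                return False
            waiter.set()
            return True

A caller would invoke prepare() before plugging the VIF, then block on the returned Event with a timeout; dispatch() is what the external-event RPC handler would call when Neutron reports the port as plugged.
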
[ 1496.742193] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1496.742193] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquired lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1496.742705] env[61978]: DEBUG nova.network.neutron [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1496.752402] env[61978]: INFO nova.compute.manager [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Took 15.80 seconds to build instance. [ 1497.255050] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6328e999-3893-4fcb-9322-463713297553 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "116a3384-8bf3-49c6-9ee0-01d2781c69d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.312s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.443246] env[61978]: DEBUG oslo_concurrency.lockutils [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "116a3384-8bf3-49c6-9ee0-01d2781c69d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.443474] env[61978]: DEBUG oslo_concurrency.lockutils [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "116a3384-8bf3-49c6-9ee0-01d2781c69d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.443688] env[61978]: DEBUG oslo_concurrency.lockutils [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "116a3384-8bf3-49c6-9ee0-01d2781c69d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.443875] env[61978]: DEBUG oslo_concurrency.lockutils [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "116a3384-8bf3-49c6-9ee0-01d2781c69d5-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.444071] env[61978]: DEBUG oslo_concurrency.lockutils [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "116a3384-8bf3-49c6-9ee0-01d2781c69d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.446660] env[61978]: INFO nova.compute.manager [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Terminating instance [ 1497.448689] env[61978]: DEBUG nova.compute.manager [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1497.448890] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1497.449749] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a8ffab-7414-41bf-8598-7618db1cedb9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.457945] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1497.458199] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9847ea7c-d3f4-4039-8a38-48a4cc402191 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.464138] env[61978]: DEBUG oslo_vmware.api [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1497.464138] env[61978]: value = "task-1396176" [ 1497.464138] env[61978]: _type = "Task" [ 1497.464138] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.472573] env[61978]: DEBUG oslo_vmware.api [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396176, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.478387] env[61978]: DEBUG nova.network.neutron [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Updating instance_info_cache with network_info: [{"id": "6c4911ae-3f5e-46c7-9538-7e1a9811252e", "address": "fa:16:3e:c3:c6:fc", "network": {"id": "1200e62b-3b3a-40e1-98a2-c33a17573a38", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2039134840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8f40d19e7c74ade886c322a78583545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c4911ae-3f", "ovs_interfaceid": "6c4911ae-3f5e-46c7-9538-7e1a9811252e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1497.975260] env[61978]: DEBUG oslo_vmware.api [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396176, 'name': PowerOffVM_Task, 'duration_secs': 0.289517} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.975541] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1497.975712] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1497.975973] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e5b9118a-c968-47ad-b677-b652850b32e3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.981174] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Releasing lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1498.009848] env[61978]: DEBUG nova.virt.hardware [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='03018882e207805cb0a09f7af388c5dd',container_format='bare',created_at=2024-11-04T15:13:45Z,direct_url=,disk_format='vmdk',id=733843c8-cf02-4c91-9365-b1d9e9aaa3be,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1465373634-shelved',owner='a8f40d19e7c74ade886c322a78583545',properties=ImageMetaProps,protected=,size=31666688,status='active',tags=,updated_at=2024-11-04T15:13:58Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1498.010129] env[61978]: DEBUG nova.virt.hardware [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1498.010301] env[61978]: DEBUG nova.virt.hardware [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1498.010522] env[61978]: DEBUG nova.virt.hardware [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1498.010663] env[61978]: DEBUG nova.virt.hardware [None 
req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1498.010819] env[61978]: DEBUG nova.virt.hardware [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1498.011046] env[61978]: DEBUG nova.virt.hardware [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1498.011219] env[61978]: DEBUG nova.virt.hardware [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1498.011401] env[61978]: DEBUG nova.virt.hardware [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1498.011573] env[61978]: DEBUG nova.virt.hardware [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1498.011750] env[61978]: DEBUG nova.virt.hardware [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1498.012953] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7461cfe2-eef8-41b3-b5fb-34f570202779 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.020416] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18bdb15f-9db5-4f75-96ac-1e8c8ca4ae91 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.034072] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:c6:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c4911ae-3f5e-46c7-9538-7e1a9811252e', 'vif_model': 'vmxnet3'}] 
{{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1498.041467] env[61978]: DEBUG oslo.service.loopingcall [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1498.042592] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1498.042867] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1498.043076] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1498.043265] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Deleting the datastore file [datastore2] 116a3384-8bf3-49c6-9ee0-01d2781c69d5 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1498.043496] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1619018d-d2ed-45c9-9ece-d74c507167e3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.057115] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a8b80347-5351-42e7-b96d-645ad71dfaa3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.063697] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1498.063697] env[61978]: value = "task-1396179" [ 1498.063697] env[61978]: _type = "Task" [ 1498.063697] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.064862] env[61978]: DEBUG oslo_vmware.api [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1498.064862] env[61978]: value = "task-1396178" [ 1498.064862] env[61978]: _type = "Task" [ 1498.064862] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.075742] env[61978]: DEBUG oslo_vmware.api [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396178, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.077977] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396179, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.577274] env[61978]: DEBUG oslo_vmware.api [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396178, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156173} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.580316] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1498.580516] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1498.580707] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1498.580892] env[61978]: INFO nova.compute.manager [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1498.581157] env[61978]: DEBUG oslo.service.loopingcall [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1498.581330] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396179, 'name': CreateVM_Task, 'duration_secs': 0.44824} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.581533] env[61978]: DEBUG nova.compute.manager [-] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1498.581626] env[61978]: DEBUG nova.network.neutron [-] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1498.583073] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1498.583665] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/733843c8-cf02-4c91-9365-b1d9e9aaa3be" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1498.583838] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquired lock "[datastore1] devstack-image-cache_base/733843c8-cf02-4c91-9365-b1d9e9aaa3be" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1498.584233] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/733843c8-cf02-4c91-9365-b1d9e9aaa3be" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1498.585053] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73aa3d1d-43bf-4cef-bc93-63b2f78a039e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.588950] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1498.588950] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]521de9d0-412f-c850-2bc1-b6a4fc86c388" [ 1498.588950] env[61978]: _type = "Task" [ 1498.588950] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.596895] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]521de9d0-412f-c850-2bc1-b6a4fc86c388, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.677251] env[61978]: DEBUG nova.compute.manager [req-7b37d507-712a-493c-b4f0-37d0e1d57323 req-1520062b-d8e1-4678-9950-ef03c019d2f2 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Received event network-changed-6c4911ae-3f5e-46c7-9538-7e1a9811252e {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1498.677483] env[61978]: DEBUG nova.compute.manager [req-7b37d507-712a-493c-b4f0-37d0e1d57323 req-1520062b-d8e1-4678-9950-ef03c019d2f2 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Refreshing instance network info cache due to event network-changed-6c4911ae-3f5e-46c7-9538-7e1a9811252e. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1498.677706] env[61978]: DEBUG oslo_concurrency.lockutils [req-7b37d507-712a-493c-b4f0-37d0e1d57323 req-1520062b-d8e1-4678-9950-ef03c019d2f2 service nova] Acquiring lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1498.678119] env[61978]: DEBUG oslo_concurrency.lockutils [req-7b37d507-712a-493c-b4f0-37d0e1d57323 req-1520062b-d8e1-4678-9950-ef03c019d2f2 service nova] Acquired lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1498.678379] env[61978]: DEBUG nova.network.neutron [req-7b37d507-712a-493c-b4f0-37d0e1d57323 req-1520062b-d8e1-4678-9950-ef03c019d2f2 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Refreshing network info cache for port 6c4911ae-3f5e-46c7-9538-7e1a9811252e {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1499.103879] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Releasing lock "[datastore1] devstack-image-cache_base/733843c8-cf02-4c91-9365-b1d9e9aaa3be" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.104233] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Processing image 733843c8-cf02-4c91-9365-b1d9e9aaa3be {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1499.104632] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/733843c8-cf02-4c91-9365-b1d9e9aaa3be/733843c8-cf02-4c91-9365-b1d9e9aaa3be.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1499.104881] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquired lock "[datastore1] devstack-image-cache_base/733843c8-cf02-4c91-9365-b1d9e9aaa3be/733843c8-cf02-4c91-9365-b1d9e9aaa3be.vmdk" {{(pid=61978) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.105197] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1499.105941] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7de55909-d874-4c17-9d9f-b5ae8753fd37 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.121434] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1499.121621] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1499.122341] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a422a6bb-2a02-439c-913d-de806ebcbf86 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.127668] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1499.127668] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5268d0a2-ddde-168e-bae1-7119ebe72232" [ 1499.127668] env[61978]: _type = "Task" [ 1499.127668] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.134673] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5268d0a2-ddde-168e-bae1-7119ebe72232, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.332266] env[61978]: DEBUG nova.network.neutron [-] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.401850] env[61978]: DEBUG nova.network.neutron [req-7b37d507-712a-493c-b4f0-37d0e1d57323 req-1520062b-d8e1-4678-9950-ef03c019d2f2 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Updated VIF entry in instance network info cache for port 6c4911ae-3f5e-46c7-9538-7e1a9811252e. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1499.402257] env[61978]: DEBUG nova.network.neutron [req-7b37d507-712a-493c-b4f0-37d0e1d57323 req-1520062b-d8e1-4678-9950-ef03c019d2f2 service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Updating instance_info_cache with network_info: [{"id": "6c4911ae-3f5e-46c7-9538-7e1a9811252e", "address": "fa:16:3e:c3:c6:fc", "network": {"id": "1200e62b-3b3a-40e1-98a2-c33a17573a38", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2039134840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8f40d19e7c74ade886c322a78583545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c4911ae-3f", "ovs_interfaceid": "6c4911ae-3f5e-46c7-9538-7e1a9811252e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.638095] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Preparing fetch location {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1499.638481] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Fetch image to [datastore1] OSTACK_IMG_103def9e-9884-48d9-9432-ba7a7cd9f9d5/OSTACK_IMG_103def9e-9884-48d9-9432-ba7a7cd9f9d5.vmdk {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1499.638587] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Downloading stream optimized image 733843c8-cf02-4c91-9365-b1d9e9aaa3be to [datastore1] OSTACK_IMG_103def9e-9884-48d9-9432-ba7a7cd9f9d5/OSTACK_IMG_103def9e-9884-48d9-9432-ba7a7cd9f9d5.vmdk on the data store datastore1 as vApp {{(pid=61978) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1499.638766] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Downloading image file data 733843c8-cf02-4c91-9365-b1d9e9aaa3be to the ESX as VM named 'OSTACK_IMG_103def9e-9884-48d9-9432-ba7a7cd9f9d5' {{(pid=61978) 
fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1499.709174] env[61978]: DEBUG oslo_vmware.rw_handles [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1499.709174] env[61978]: value = "resgroup-9" [ 1499.709174] env[61978]: _type = "ResourcePool" [ 1499.709174] env[61978]: }. {{(pid=61978) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1499.709532] env[61978]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-5a194913-f8c3-4df1-99f1-7c37176956f6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.732603] env[61978]: DEBUG oslo_vmware.rw_handles [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lease: (returnval){ [ 1499.732603] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52aeefcb-c393-6d4b-310b-febde103dcb2" [ 1499.732603] env[61978]: _type = "HttpNfcLease" [ 1499.732603] env[61978]: } obtained for vApp import into resource pool (val){ [ 1499.732603] env[61978]: value = "resgroup-9" [ 1499.732603] env[61978]: _type = "ResourcePool" [ 1499.732603] env[61978]: }. {{(pid=61978) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1499.732911] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the lease: (returnval){ [ 1499.732911] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52aeefcb-c393-6d4b-310b-febde103dcb2" [ 1499.732911] env[61978]: _type = "HttpNfcLease" [ 1499.732911] env[61978]: } to be ready. {{(pid=61978) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1499.738677] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1499.738677] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52aeefcb-c393-6d4b-310b-febde103dcb2" [ 1499.738677] env[61978]: _type = "HttpNfcLease" [ 1499.738677] env[61978]: } is initializing. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1499.834446] env[61978]: INFO nova.compute.manager [-] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Took 1.25 seconds to deallocate network for instance. [ 1499.904772] env[61978]: DEBUG oslo_concurrency.lockutils [req-7b37d507-712a-493c-b4f0-37d0e1d57323 req-1520062b-d8e1-4678-9950-ef03c019d2f2 service nova] Releasing lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1500.241109] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1500.241109] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52aeefcb-c393-6d4b-310b-febde103dcb2" [ 1500.241109] env[61978]: _type = "HttpNfcLease" [ 1500.241109] env[61978]: } is initializing. 
{{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1500.341471] env[61978]: DEBUG oslo_concurrency.lockutils [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.341796] env[61978]: DEBUG oslo_concurrency.lockutils [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.342042] env[61978]: DEBUG nova.objects.instance [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lazy-loading 'resources' on Instance uuid 116a3384-8bf3-49c6-9ee0-01d2781c69d5 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1500.701769] env[61978]: DEBUG nova.compute.manager [req-898dc418-8b78-4ae3-a17f-24c9aed8f752 req-357ebc98-8000-4bd6-812e-69809205479c service nova] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Received event network-vif-deleted-dbce7690-5815-4441-be23-3ad6e4b0b885 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1500.740598] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1500.740598] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52aeefcb-c393-6d4b-310b-febde103dcb2" [ 1500.740598] env[61978]: _type = "HttpNfcLease" [ 1500.740598] env[61978]: } is ready. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1500.740874] env[61978]: DEBUG oslo_vmware.rw_handles [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1500.740874] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52aeefcb-c393-6d4b-310b-febde103dcb2" [ 1500.740874] env[61978]: _type = "HttpNfcLease" [ 1500.740874] env[61978]: }. {{(pid=61978) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1500.741575] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60476fe-6eb5-497c-b501-58c9a66805db {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.748069] env[61978]: DEBUG oslo_vmware.rw_handles [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529413ea-ec1c-d1a5-b4ab-5182f692bb8f/disk-0.vmdk from lease info. 
{{(pid=61978) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1500.748283] env[61978]: DEBUG oslo_vmware.rw_handles [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Creating HTTP connection to write to file with size = 31666688 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529413ea-ec1c-d1a5-b4ab-5182f692bb8f/disk-0.vmdk. {{(pid=61978) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1500.811820] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-49fca5e7-9c9a-4f3d-9547-2dc4eb0cec8b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.917898] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c60a813-2405-4f6f-9bce-98ae863d8bad {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.925356] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c198df-f717-4fd6-9753-58087eb341cd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.957415] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7d68bb-5a5a-430d-a44c-3a9471848235 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.964486] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e877bc44-611c-4137-b265-2643de39246f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.978644] env[61978]: DEBUG nova.compute.provider_tree [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1501.482099] env[61978]: DEBUG nova.scheduler.client.report [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1501.883299] env[61978]: DEBUG oslo_vmware.rw_handles [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Completed reading data from the image iterator. 
{{(pid=61978) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1501.883680] env[61978]: DEBUG oslo_vmware.rw_handles [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529413ea-ec1c-d1a5-b4ab-5182f692bb8f/disk-0.vmdk. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1501.884602] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb3bbc2-3e48-484d-acf6-ff3c8cb33416 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.891767] env[61978]: DEBUG oslo_vmware.rw_handles [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529413ea-ec1c-d1a5-b4ab-5182f692bb8f/disk-0.vmdk is in state: ready. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1501.891950] env[61978]: DEBUG oslo_vmware.rw_handles [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529413ea-ec1c-d1a5-b4ab-5182f692bb8f/disk-0.vmdk. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1501.892243] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-13fd8e54-27c5-4bcf-9c02-cd19e4535759 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.990108] env[61978]: DEBUG oslo_concurrency.lockutils [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.648s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.009166] env[61978]: INFO nova.scheduler.client.report [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Deleted allocations for instance 116a3384-8bf3-49c6-9ee0-01d2781c69d5 [ 1502.096313] env[61978]: DEBUG oslo_vmware.rw_handles [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529413ea-ec1c-d1a5-b4ab-5182f692bb8f/disk-0.vmdk. 
{{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1502.096574] env[61978]: INFO nova.virt.vmwareapi.images [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Downloaded image file data 733843c8-cf02-4c91-9365-b1d9e9aaa3be [ 1502.097417] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1361e9c4-e7bd-49b2-8b45-b220697ec798 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.112336] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-08d8455c-08c3-4646-978c-3b3afd2d826f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.152194] env[61978]: INFO nova.virt.vmwareapi.images [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] The imported VM was unregistered [ 1502.154591] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Caching image {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1502.154853] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Creating directory with path [datastore1] devstack-image-cache_base/733843c8-cf02-4c91-9365-b1d9e9aaa3be {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1502.155158] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5752b0e2-d52f-4b3b-abe0-4056819f6e0c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.178746] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Created directory with path [datastore1] devstack-image-cache_base/733843c8-cf02-4c91-9365-b1d9e9aaa3be {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1502.178931] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_103def9e-9884-48d9-9432-ba7a7cd9f9d5/OSTACK_IMG_103def9e-9884-48d9-9432-ba7a7cd9f9d5.vmdk to [datastore1] devstack-image-cache_base/733843c8-cf02-4c91-9365-b1d9e9aaa3be/733843c8-cf02-4c91-9365-b1d9e9aaa3be.vmdk. 
{{(pid=61978) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1502.179199] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-94307763-0053-48f8-b844-bdec9a4b58e0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.185359] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1502.185359] env[61978]: value = "task-1396182" [ 1502.185359] env[61978]: _type = "Task" [ 1502.185359] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.192651] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396182, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.517182] env[61978]: DEBUG oslo_concurrency.lockutils [None req-178d9248-3c90-4e97-ae40-541f3ac4137f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "116a3384-8bf3-49c6-9ee0-01d2781c69d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.074s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.695693] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396182, 'name': MoveVirtualDisk_Task} progress is 15%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.196467] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396182, 'name': MoveVirtualDisk_Task} progress is 35%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.393367] env[61978]: DEBUG oslo_concurrency.lockutils [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "fe81b6d2-053f-4db4-b3dd-a67b21d02c52" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.393619] env[61978]: DEBUG oslo_concurrency.lockutils [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "fe81b6d2-053f-4db4-b3dd-a67b21d02c52" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.697030] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396182, 'name': MoveVirtualDisk_Task} progress is 54%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.896025] env[61978]: DEBUG nova.compute.manager [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1504.198327] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396182, 'name': MoveVirtualDisk_Task} progress is 74%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.420709] env[61978]: DEBUG oslo_concurrency.lockutils [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.421020] env[61978]: DEBUG oslo_concurrency.lockutils [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.422595] env[61978]: INFO nova.compute.claims [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1504.698251] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396182, 'name': MoveVirtualDisk_Task} progress is 91%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.198482] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396182, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.506515] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-643ebf64-9974-4fc0-9374-b72bcc66efb2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.514157] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-655c1f41-ec85-4e38-b4a2-af0356fd9707 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.544717] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baadd9af-cb22-44dd-8f21-98e7032baa6a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.552730] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee87ee0-7f4f-4a3b-a02b-862037f7b219 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.566310] env[61978]: DEBUG nova.compute.provider_tree [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1505.699616] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396182, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.363831} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.699904] env[61978]: INFO nova.virt.vmwareapi.ds_util [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_103def9e-9884-48d9-9432-ba7a7cd9f9d5/OSTACK_IMG_103def9e-9884-48d9-9432-ba7a7cd9f9d5.vmdk to [datastore1] devstack-image-cache_base/733843c8-cf02-4c91-9365-b1d9e9aaa3be/733843c8-cf02-4c91-9365-b1d9e9aaa3be.vmdk. 
[ 1505.700118] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Cleaning up location [datastore1] OSTACK_IMG_103def9e-9884-48d9-9432-ba7a7cd9f9d5 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1505.700325] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_103def9e-9884-48d9-9432-ba7a7cd9f9d5 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1505.700560] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4abd1128-278f-49e5-81cc-a7383f925849 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.706816] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1505.706816] env[61978]: value = "task-1396183" [ 1505.706816] env[61978]: _type = "Task" [ 1505.706816] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.714470] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396183, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.070081] env[61978]: DEBUG nova.scheduler.client.report [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1506.216736] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396183, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.337981} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.217098] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1506.217189] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Releasing lock "[datastore1] devstack-image-cache_base/733843c8-cf02-4c91-9365-b1d9e9aaa3be/733843c8-cf02-4c91-9365-b1d9e9aaa3be.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1506.217447] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/733843c8-cf02-4c91-9365-b1d9e9aaa3be/733843c8-cf02-4c91-9365-b1d9e9aaa3be.vmdk to [datastore1] 6a454083-8d85-4a29-98dc-29eb0a072560/6a454083-8d85-4a29-98dc-29eb0a072560.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1506.217707] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eecb48c8-e093-4a25-9b84-5253b3305c50 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.224177] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1506.224177] env[61978]: value = "task-1396184" [ 1506.224177] env[61978]: _type = "Task" [ 1506.224177] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.231150] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396184, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.574553] env[61978]: DEBUG oslo_concurrency.lockutils [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.153s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1506.575063] env[61978]: DEBUG nova.compute.manager [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1506.735354] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396184, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.080527] env[61978]: DEBUG nova.compute.utils [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1507.082115] env[61978]: DEBUG nova.compute.manager [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1507.082290] env[61978]: DEBUG nova.network.neutron [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1507.132674] env[61978]: DEBUG nova.policy [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df950ca91cd64479950545608f749fb6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aaa4a0cb1a4c45949b43032fd9395200', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1507.235061] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396184, 'name': CopyVirtualDisk_Task} progress is 29%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.408729] env[61978]: DEBUG nova.network.neutron [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Successfully created port: 280b4322-d003-439c-af46-b107eef37f87 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1507.585317] env[61978]: DEBUG nova.compute.manager [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1507.735480] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396184, 'name': CopyVirtualDisk_Task} progress is 52%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.235718] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396184, 'name': CopyVirtualDisk_Task} progress is 74%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.596841] env[61978]: DEBUG nova.compute.manager [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1508.625538] env[61978]: DEBUG nova.virt.hardware [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1508.625821] env[61978]: DEBUG nova.virt.hardware [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1508.626023] env[61978]: DEBUG nova.virt.hardware [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1508.626283] env[61978]: DEBUG nova.virt.hardware [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1508.626477] env[61978]: DEBUG nova.virt.hardware [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1508.626635] env[61978]: DEBUG nova.virt.hardware [None req-22739c76-318c-4725-9ff9-dcf528975fbf 
tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1508.626852] env[61978]: DEBUG nova.virt.hardware [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1508.627037] env[61978]: DEBUG nova.virt.hardware [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1508.627231] env[61978]: DEBUG nova.virt.hardware [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1508.627406] env[61978]: DEBUG nova.virt.hardware [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1508.627583] env[61978]: DEBUG nova.virt.hardware [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1508.628576] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070b9c35-66c0-46d5-9a98-450a38eb8f2b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.636974] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-819a9f68-0422-4338-a20a-7d207661b60e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.736015] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396184, 'name': CopyVirtualDisk_Task} progress is 97%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.843450] env[61978]: DEBUG nova.compute.manager [req-b354d41e-6513-4757-acbc-8e7a26e3f76a req-05a4af9b-0e26-4f7b-b3a1-9ffb4b963c9c service nova] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Received event network-vif-plugged-280b4322-d003-439c-af46-b107eef37f87 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1508.843709] env[61978]: DEBUG oslo_concurrency.lockutils [req-b354d41e-6513-4757-acbc-8e7a26e3f76a req-05a4af9b-0e26-4f7b-b3a1-9ffb4b963c9c service nova] Acquiring lock "fe81b6d2-053f-4db4-b3dd-a67b21d02c52-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1508.843995] env[61978]: DEBUG oslo_concurrency.lockutils [req-b354d41e-6513-4757-acbc-8e7a26e3f76a req-05a4af9b-0e26-4f7b-b3a1-9ffb4b963c9c service nova] Lock "fe81b6d2-053f-4db4-b3dd-a67b21d02c52-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.844099] env[61978]: DEBUG oslo_concurrency.lockutils [req-b354d41e-6513-4757-acbc-8e7a26e3f76a req-05a4af9b-0e26-4f7b-b3a1-9ffb4b963c9c service nova] Lock "fe81b6d2-053f-4db4-b3dd-a67b21d02c52-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1508.844281] env[61978]: DEBUG nova.compute.manager [req-b354d41e-6513-4757-acbc-8e7a26e3f76a req-05a4af9b-0e26-4f7b-b3a1-9ffb4b963c9c service nova] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] No waiting events found dispatching network-vif-plugged-280b4322-d003-439c-af46-b107eef37f87 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1508.844450] env[61978]: WARNING nova.compute.manager [req-b354d41e-6513-4757-acbc-8e7a26e3f76a req-05a4af9b-0e26-4f7b-b3a1-9ffb4b963c9c service nova] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Received unexpected event network-vif-plugged-280b4322-d003-439c-af46-b107eef37f87 for instance with vm_state building and task_state spawning. [ 1509.237529] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396184, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.542463} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.237936] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/733843c8-cf02-4c91-9365-b1d9e9aaa3be/733843c8-cf02-4c91-9365-b1d9e9aaa3be.vmdk to [datastore1] 6a454083-8d85-4a29-98dc-29eb0a072560/6a454083-8d85-4a29-98dc-29eb0a072560.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1509.238727] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da8d395-329e-4438-b636-e76448d7a5a0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.260395] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 6a454083-8d85-4a29-98dc-29eb0a072560/6a454083-8d85-4a29-98dc-29eb0a072560.vmdk or device None with type streamOptimized {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1509.260738] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-faf66862-a2f5-44f5-9a59-f14693347672 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.281435] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1509.281435] env[61978]: value = "task-1396185" [ 1509.281435] env[61978]: _type = "Task" [ 1509.281435] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.288707] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396185, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.390932] env[61978]: DEBUG nova.network.neutron [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Successfully updated port: 280b4322-d003-439c-af46-b107eef37f87 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1509.413199] env[61978]: DEBUG nova.compute.manager [req-fc0f31bb-5ed3-4300-97b4-9925489dbec4 req-3edb945e-1a5f-488d-92ed-acea1ae6aab0 service nova] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Received event network-changed-280b4322-d003-439c-af46-b107eef37f87 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1509.413412] env[61978]: DEBUG nova.compute.manager [req-fc0f31bb-5ed3-4300-97b4-9925489dbec4 req-3edb945e-1a5f-488d-92ed-acea1ae6aab0 service nova] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Refreshing instance network info cache due to event network-changed-280b4322-d003-439c-af46-b107eef37f87. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1509.413638] env[61978]: DEBUG oslo_concurrency.lockutils [req-fc0f31bb-5ed3-4300-97b4-9925489dbec4 req-3edb945e-1a5f-488d-92ed-acea1ae6aab0 service nova] Acquiring lock "refresh_cache-fe81b6d2-053f-4db4-b3dd-a67b21d02c52" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1509.413788] env[61978]: DEBUG oslo_concurrency.lockutils [req-fc0f31bb-5ed3-4300-97b4-9925489dbec4 req-3edb945e-1a5f-488d-92ed-acea1ae6aab0 service nova] Acquired lock "refresh_cache-fe81b6d2-053f-4db4-b3dd-a67b21d02c52" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1509.413960] env[61978]: DEBUG nova.network.neutron [req-fc0f31bb-5ed3-4300-97b4-9925489dbec4 req-3edb945e-1a5f-488d-92ed-acea1ae6aab0 service nova] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Refreshing network info cache for port 280b4322-d003-439c-af46-b107eef37f87 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1509.791104] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396185, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.894020] env[61978]: DEBUG oslo_concurrency.lockutils [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "refresh_cache-fe81b6d2-053f-4db4-b3dd-a67b21d02c52" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1509.945735] env[61978]: DEBUG nova.network.neutron [req-fc0f31bb-5ed3-4300-97b4-9925489dbec4 req-3edb945e-1a5f-488d-92ed-acea1ae6aab0 service nova] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1510.017031] env[61978]: DEBUG nova.network.neutron [req-fc0f31bb-5ed3-4300-97b4-9925489dbec4 req-3edb945e-1a5f-488d-92ed-acea1ae6aab0 service nova] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1510.292030] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396185, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.519737] env[61978]: DEBUG oslo_concurrency.lockutils [req-fc0f31bb-5ed3-4300-97b4-9925489dbec4 req-3edb945e-1a5f-488d-92ed-acea1ae6aab0 service nova] Releasing lock "refresh_cache-fe81b6d2-053f-4db4-b3dd-a67b21d02c52" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1510.520081] env[61978]: DEBUG oslo_concurrency.lockutils [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired lock "refresh_cache-fe81b6d2-053f-4db4-b3dd-a67b21d02c52" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.520245] env[61978]: DEBUG nova.network.neutron [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1510.793060] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396185, 'name': ReconfigVM_Task, 'duration_secs': 1.025936} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.793313] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 6a454083-8d85-4a29-98dc-29eb0a072560/6a454083-8d85-4a29-98dc-29eb0a072560.vmdk or device None with type streamOptimized {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1510.794450] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_name': '/dev/sda', 'encryption_format': None, 'encrypted': False, 'device_type': 'disk', 'disk_bus': None, 'size': 0, 'encryption_options': None, 'encryption_secret_uuid': None, 'guest_format': None, 'boot_index': 0, 'image_id': '4732143d-796a-4a66-9f1e-806f8b0654e0'}], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sdb', 'disk_bus': None, 'device_type': None, 'attachment_id': 'ba47d155-30d2-4d6e-8013-54e14c4c0217', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296082', 'volume_id': '92c75742-2007-44d7-9c7e-705254285c91', 'name': 'volume-92c75742-2007-44d7-9c7e-705254285c91', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '6a454083-8d85-4a29-98dc-29eb0a072560', 'attached_at': '', 'detached_at': '', 'volume_id': '92c75742-2007-44d7-9c7e-705254285c91', 'serial': '92c75742-2007-44d7-9c7e-705254285c91'}, 'boot_index': None, 'guest_format': None, 'delete_on_termination': False, 'volume_type': None}], 'swap': None} {{(pid=61978) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1510.794664] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Volume attach. 
Driver type: vmdk {{(pid=61978) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1510.794860] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296082', 'volume_id': '92c75742-2007-44d7-9c7e-705254285c91', 'name': 'volume-92c75742-2007-44d7-9c7e-705254285c91', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '6a454083-8d85-4a29-98dc-29eb0a072560', 'attached_at': '', 'detached_at': '', 'volume_id': '92c75742-2007-44d7-9c7e-705254285c91', 'serial': '92c75742-2007-44d7-9c7e-705254285c91'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1510.795624] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1780970c-7776-4e33-b953-e95a5ea82272 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.810195] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6546d89-7f7e-453f-be3c-f7f9c341c479 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.833096] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] volume-92c75742-2007-44d7-9c7e-705254285c91/volume-92c75742-2007-44d7-9c7e-705254285c91.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1510.833359] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0daecf3c-6c1d-4759-9821-41dcb0fc3937 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.852407] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1510.852407] env[61978]: value = "task-1396186" [ 1510.852407] env[61978]: _type = "Task" [ 1510.852407] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.860177] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396186, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.053059] env[61978]: DEBUG nova.network.neutron [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1511.186334] env[61978]: DEBUG nova.network.neutron [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Updating instance_info_cache with network_info: [{"id": "280b4322-d003-439c-af46-b107eef37f87", "address": "fa:16:3e:5b:f7:1f", "network": {"id": "a08d803f-f140-462d-a335-21fe5ce33ff5", "bridge": "br-int", "label": "tempest-ServersTestJSON-1636313508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaa4a0cb1a4c45949b43032fd9395200", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap280b4322-d0", "ovs_interfaceid": "280b4322-d003-439c-af46-b107eef37f87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1511.362670] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396186, 'name': ReconfigVM_Task, 'duration_secs': 0.321244} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.363032] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Reconfigured VM instance instance-00000070 to attach disk [datastore2] volume-92c75742-2007-44d7-9c7e-705254285c91/volume-92c75742-2007-44d7-9c7e-705254285c91.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1511.367807] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bab003ff-7881-46c5-adcb-da30096b6499 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.382439] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1511.382439] env[61978]: value = "task-1396187" [ 1511.382439] env[61978]: _type = "Task" [ 1511.382439] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.389865] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396187, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.689551] env[61978]: DEBUG oslo_concurrency.lockutils [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Releasing lock "refresh_cache-fe81b6d2-053f-4db4-b3dd-a67b21d02c52" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1511.689889] env[61978]: DEBUG nova.compute.manager [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Instance network_info: |[{"id": "280b4322-d003-439c-af46-b107eef37f87", "address": "fa:16:3e:5b:f7:1f", "network": {"id": "a08d803f-f140-462d-a335-21fe5ce33ff5", "bridge": "br-int", "label": "tempest-ServersTestJSON-1636313508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaa4a0cb1a4c45949b43032fd9395200", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap280b4322-d0", "ovs_interfaceid": "280b4322-d003-439c-af46-b107eef37f87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1511.690376] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:f7:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb68953b-dee5-4d9d-b47b-277336ba76dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '280b4322-d003-439c-af46-b107eef37f87', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1511.697754] env[61978]: DEBUG oslo.service.loopingcall [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1511.697968] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1511.698213] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fe53dcad-ab57-47ac-9bc3-21b8aa73cfdb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.717779] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1511.717779] env[61978]: value = "task-1396188" [ 1511.717779] env[61978]: _type = "Task" [ 1511.717779] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.725269] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396188, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.892643] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396187, 'name': ReconfigVM_Task, 'duration_secs': 0.143386} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.892981] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296082', 'volume_id': '92c75742-2007-44d7-9c7e-705254285c91', 'name': 'volume-92c75742-2007-44d7-9c7e-705254285c91', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '6a454083-8d85-4a29-98dc-29eb0a072560', 'attached_at': '', 'detached_at': '', 'volume_id': '92c75742-2007-44d7-9c7e-705254285c91', 'serial': '92c75742-2007-44d7-9c7e-705254285c91'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1511.893576] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-928b4002-3163-4f4c-89e9-17604b739c9a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.899752] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1511.899752] env[61978]: value = "task-1396189" [ 1511.899752] env[61978]: _type = "Task" [ 1511.899752] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.908986] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396189, 'name': Rename_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.227831] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396188, 'name': CreateVM_Task, 'duration_secs': 0.330236} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.228013] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1512.228786] env[61978]: DEBUG oslo_concurrency.lockutils [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1512.228962] env[61978]: DEBUG oslo_concurrency.lockutils [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1512.229341] env[61978]: DEBUG oslo_concurrency.lockutils [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1512.229612] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22d9fe5c-2846-4175-a0a5-1ccd5437bed6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.234255] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1512.234255] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a53751-95e5-aa3e-3fd1-1b69c2cd2ef9" [ 1512.234255] env[61978]: _type = "Task" [ 1512.234255] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.241689] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a53751-95e5-aa3e-3fd1-1b69c2cd2ef9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.408931] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396189, 'name': Rename_Task, 'duration_secs': 0.161878} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.409377] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1512.409629] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c9479277-aa9f-48e9-952c-3b896ff37e58 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.415659] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1512.415659] env[61978]: value = "task-1396190" [ 1512.415659] env[61978]: _type = "Task" [ 1512.415659] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.423187] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396190, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.745218] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a53751-95e5-aa3e-3fd1-1b69c2cd2ef9, 'name': SearchDatastore_Task, 'duration_secs': 0.008719} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.745491] env[61978]: DEBUG oslo_concurrency.lockutils [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1512.745740] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1512.745986] env[61978]: DEBUG oslo_concurrency.lockutils [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1512.746156] env[61978]: DEBUG oslo_concurrency.lockutils [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1512.746352] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1512.746615] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be5208e1-1d93-4f84-8fe1-46f3b092fe87 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.754259] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1512.754442] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1512.755158] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1016fdf-75ab-4d50-8a04-f6643c4f8b99 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.760127] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1512.760127] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52937f29-16fa-6e55-410c-503ef239e521" [ 1512.760127] env[61978]: _type = "Task" [ 1512.760127] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.767644] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52937f29-16fa-6e55-410c-503ef239e521, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.925824] env[61978]: DEBUG oslo_vmware.api [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396190, 'name': PowerOnVM_Task, 'duration_secs': 0.445241} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.926108] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1513.025359] env[61978]: DEBUG nova.compute.manager [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1513.026430] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2301a022-b249-4366-a499-f64fa34b0d3a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.270794] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52937f29-16fa-6e55-410c-503ef239e521, 'name': SearchDatastore_Task, 'duration_secs': 0.011156} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.271544] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48781c95-7acf-4ae6-936e-19198350e01e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.276644] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1513.276644] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ab5082-93ef-e3cb-cff4-f430f94f0e88" [ 1513.276644] env[61978]: _type = "Task" [ 1513.276644] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.284374] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ab5082-93ef-e3cb-cff4-f430f94f0e88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.543111] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6a587e86-2248-4c53-bce3-d1bf63b14557 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "6a454083-8d85-4a29-98dc-29eb0a072560" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 28.710s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1513.788015] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52ab5082-93ef-e3cb-cff4-f430f94f0e88, 'name': SearchDatastore_Task, 'duration_secs': 0.009291} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.788303] env[61978]: DEBUG oslo_concurrency.lockutils [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1513.788602] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] fe81b6d2-053f-4db4-b3dd-a67b21d02c52/fe81b6d2-053f-4db4-b3dd-a67b21d02c52.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1513.788871] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d0925344-c8c6-4a1b-b51d-278ed93bb43d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.795154] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1513.795154] env[61978]: value = "task-1396191" [ 1513.795154] env[61978]: _type = "Task" [ 1513.795154] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.802380] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396191, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.307641] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396191, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.806858] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396191, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.592246} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.807203] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] fe81b6d2-053f-4db4-b3dd-a67b21d02c52/fe81b6d2-053f-4db4-b3dd-a67b21d02c52.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1514.807340] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1514.807591] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0849afcf-1a1b-4cbf-a5fb-3b11ec884062 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.813921] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1514.813921] env[61978]: value = "task-1396192" [ 1514.813921] env[61978]: _type = "Task" [ 1514.813921] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.822311] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396192, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.323335] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396192, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071209} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.323654] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1515.324442] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-927ba981-bc35-4066-b512-4fd91a61f73c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.346330] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] fe81b6d2-053f-4db4-b3dd-a67b21d02c52/fe81b6d2-053f-4db4-b3dd-a67b21d02c52.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1515.346582] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c24ae6e9-d639-43d5-8837-fe4388a60bf0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.364619] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1515.364619] env[61978]: value = "task-1396193" [ 1515.364619] env[61978]: _type = "Task" [ 1515.364619] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.372639] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396193, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.874225] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396193, 'name': ReconfigVM_Task, 'duration_secs': 0.292614} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.874537] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Reconfigured VM instance instance-00000077 to attach disk [datastore2] fe81b6d2-053f-4db4-b3dd-a67b21d02c52/fe81b6d2-053f-4db4-b3dd-a67b21d02c52.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1515.875145] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9d2e9218-e757-4162-88ad-7e3e9500fde2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.881319] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1515.881319] env[61978]: value = "task-1396194" [ 1515.881319] env[61978]: _type = "Task" [ 1515.881319] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.889012] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396194, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.391100] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396194, 'name': Rename_Task, 'duration_secs': 0.147486} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.391392] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1516.391637] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-312e4d62-5ce2-4bc4-94c5-bbf02ebf4324 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.397857] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1516.397857] env[61978]: value = "task-1396195" [ 1516.397857] env[61978]: _type = "Task" [ 1516.397857] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.404754] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396195, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.909603] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396195, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.409121] env[61978]: DEBUG oslo_vmware.api [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396195, 'name': PowerOnVM_Task, 'duration_secs': 0.619418} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.409416] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1517.409611] env[61978]: INFO nova.compute.manager [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Took 8.81 seconds to spawn the instance on the hypervisor. [ 1517.409802] env[61978]: DEBUG nova.compute.manager [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1517.410566] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe05f0c-6df5-45c9-b9e9-e5119ddadd0a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.928023] env[61978]: INFO nova.compute.manager [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Took 13.53 seconds to build instance. 
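The spawn sequence logged above (ReconfigVM_Task to attach the root vmdk, Rename_Task, then PowerOnVM_Task) is driven by the same wait-for-task pattern each time: the vSphere call returns a Task reference and the caller polls its progress until vCenter reports success, logging the duration. The sketch below is a minimal, self-contained re-creation of that polling loop for illustration only; it is not the oslo_vmware.api implementation, and fetch_task_info is a hypothetical callable standing in for a vCenter task-info lookup that returns something like {'state': 'running', 'progress': 45}, {'state': 'success'} or {'state': 'error', 'message': ...}.

import time

# Illustrative sketch, not oslo_vmware.api: reproduces the "progress is N% ...
# completed successfully" polling seen in the trace above. fetch_task_info(task_id)
# is a hypothetical callable returning {'state': ..., 'progress': ...}.
def wait_for_vcenter_task(fetch_task_info, task_id, poll_interval=0.5):
    start = time.monotonic()
    while True:
        info = fetch_task_info(task_id)
        if info['state'] == 'success':
            duration = time.monotonic() - start
            print(f"Task: {{'id': {task_id!r}, 'duration_secs': {duration:.6f}}} "
                  "completed successfully.")
            return info
        if info['state'] == 'error':
            raise RuntimeError(f"Task {task_id} failed: {info.get('message')}")
        print(f"Task: {{'id': {task_id!r}}} progress is {info.get('progress', 0)}%.")
        time.sleep(poll_interval)

Against the trace above, each task follows this shape: task-1396193 (ReconfigVM_Task) went from 5% to completed in 0.292614s, and task-1396195 (PowerOnVM_Task) from 0% through 89% to completed in 0.619418s.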
[ 1518.430600] env[61978]: DEBUG oslo_concurrency.lockutils [None req-22739c76-318c-4725-9ff9-dcf528975fbf tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "fe81b6d2-053f-4db4-b3dd-a67b21d02c52" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.037s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.692859] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "fe81b6d2-053f-4db4-b3dd-a67b21d02c52" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.692859] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "fe81b6d2-053f-4db4-b3dd-a67b21d02c52" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.693273] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "fe81b6d2-053f-4db4-b3dd-a67b21d02c52-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.693273] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "fe81b6d2-053f-4db4-b3dd-a67b21d02c52-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.693420] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "fe81b6d2-053f-4db4-b3dd-a67b21d02c52-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.695652] env[61978]: INFO nova.compute.manager [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Terminating instance [ 1518.697434] env[61978]: DEBUG nova.compute.manager [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1518.697638] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1518.698502] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-699a8c9b-f1d7-4c1a-8376-9e1ff0c71c55 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.707428] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1518.707656] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3215328f-c2a8-430a-a593-977de1a8d3bb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.713943] env[61978]: DEBUG oslo_vmware.api [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1518.713943] env[61978]: value = "task-1396196" [ 1518.713943] env[61978]: _type = "Task" [ 1518.713943] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.721887] env[61978]: DEBUG oslo_vmware.api [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396196, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.224289] env[61978]: DEBUG oslo_vmware.api [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396196, 'name': PowerOffVM_Task, 'duration_secs': 0.163467} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.224626] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1519.224776] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1519.225048] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b89db6d5-7c9d-4790-bc14-51befd7714dd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.281716] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1519.281936] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1519.282150] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Deleting the datastore file [datastore2] fe81b6d2-053f-4db4-b3dd-a67b21d02c52 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1519.282419] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ea12134e-af81-4577-a08f-e592a00f7d14 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.288768] env[61978]: DEBUG oslo_vmware.api [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1519.288768] env[61978]: value = "task-1396198" [ 1519.288768] env[61978]: _type = "Task" [ 1519.288768] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.296200] env[61978]: DEBUG oslo_vmware.api [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396198, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.798843] env[61978]: DEBUG oslo_vmware.api [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396198, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.21033} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.799132] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1519.799330] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1519.799552] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1519.799709] env[61978]: INFO nova.compute.manager [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1519.799962] env[61978]: DEBUG oslo.service.loopingcall [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1519.800184] env[61978]: DEBUG nova.compute.manager [-] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1519.800280] env[61978]: DEBUG nova.network.neutron [-] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1520.056976] env[61978]: DEBUG nova.compute.manager [req-e423c352-362e-4885-af08-4c901e046f78 req-0a1e5e7d-acff-434c-9ef2-eb2db6324f0d service nova] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Received event network-vif-deleted-280b4322-d003-439c-af46-b107eef37f87 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1520.057205] env[61978]: INFO nova.compute.manager [req-e423c352-362e-4885-af08-4c901e046f78 req-0a1e5e7d-acff-434c-9ef2-eb2db6324f0d service nova] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Neutron deleted interface 280b4322-d003-439c-af46-b107eef37f87; detaching it from the instance and deleting it from the info cache [ 1520.057386] env[61978]: DEBUG nova.network.neutron [req-e423c352-362e-4885-af08-4c901e046f78 req-0a1e5e7d-acff-434c-9ef2-eb2db6324f0d service nova] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1520.534257] env[61978]: DEBUG nova.network.neutron [-] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} 
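The terminate path above is serialized by named locks from oslo_concurrency.lockutils: the log records when each lock is requested ("Acquiring"), how long the caller waited before getting it, and how long it was held on release. The per-instance UUID lock, for instance, was held 15.037s across _locked_do_build_and_run_instance just above, and the matching terminate lock taken at 1518.69 is released a few entries further on with held 5.026s. The context manager below is only a toy sketch of where those waited/held figures come from, assuming a plain in-process threading.Lock and hypothetical names; it is not the real lockutils, which also handles fair and inter-process locks.

import time
from contextlib import contextmanager
from threading import Lock

_locks = {}  # hypothetical registry of named in-process locks

# Sketch only: mirrors the 'Acquiring / acquired :: waited / "released" :: held'
# accounting visible in the lockutils lines above.
@contextmanager
def timed_lock(name, caller):
    lock = _locks.setdefault(name, Lock())
    print(f'Acquiring lock "{name}" by "{caller}"')
    t0 = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired by "{caller}" :: waited {time.monotonic() - t0:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        held = time.monotonic() - t1
        lock.release()
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

Wrapping the do_terminate_instance body in timed_lock("fe81b6d2-053f-4db4-b3dd-a67b21d02c52", caller) would emit the same triple seen for that lock: Acquiring and acquired at 1518.69 (waited 0.001s), then the release further on (held 5.026s).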
[ 1520.559773] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1b65957e-e999-47f5-be39-f5065dbf2ae9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.570265] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb06bec-ff32-4055-9cab-2b14dd99b731 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.596620] env[61978]: DEBUG nova.compute.manager [req-e423c352-362e-4885-af08-4c901e046f78 req-0a1e5e7d-acff-434c-9ef2-eb2db6324f0d service nova] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Detach interface failed, port_id=280b4322-d003-439c-af46-b107eef37f87, reason: Instance fe81b6d2-053f-4db4-b3dd-a67b21d02c52 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1521.037302] env[61978]: INFO nova.compute.manager [-] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Took 1.24 seconds to deallocate network for instance. [ 1521.543220] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1521.543524] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1521.543754] env[61978]: DEBUG nova.objects.instance [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lazy-loading 'resources' on Instance uuid fe81b6d2-053f-4db4-b3dd-a67b21d02c52 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1522.121389] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37ae580-ce46-4e5b-adcf-c1b1dd2e4eec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.130567] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1cab0b1-0371-4894-bdbc-38f8e21a3df1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.158951] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69151ce1-d045-4485-85f6-0cfd75ec1209 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.165963] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6653c859-4c95-4f22-85af-b7c8e77ddcef {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.178392] env[61978]: DEBUG nova.compute.provider_tree [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 
tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1522.681531] env[61978]: DEBUG nova.scheduler.client.report [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1523.186336] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.642s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.210265] env[61978]: INFO nova.scheduler.client.report [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Deleted allocations for instance fe81b6d2-053f-4db4-b3dd-a67b21d02c52 [ 1523.718482] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1c7576ce-f416-4dbf-b835-6277744da99f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "fe81b6d2-053f-4db4-b3dd-a67b21d02c52" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.026s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.591088] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "d587cf18-1558-4e01-be53-3b7bf8287fdd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.591329] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "d587cf18-1558-4e01-be53-3b7bf8287fdd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.094227] env[61978]: DEBUG nova.compute.manager [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1525.617294] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.617546] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.619064] env[61978]: INFO nova.compute.claims [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1525.955572] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1525.955834] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1525.955947] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1525.956083] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1526.459756] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Skipping network cache update for instance because it is Building. 
{{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1526.501975] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1526.502147] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquired lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1526.502301] env[61978]: DEBUG nova.network.neutron [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Forcefully refreshing network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1526.502458] env[61978]: DEBUG nova.objects.instance [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lazy-loading 'info_cache' on Instance uuid 6a454083-8d85-4a29-98dc-29eb0a072560 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1526.691603] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6dab80-afb5-413f-9ac5-ed079ce6462e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.699117] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-763edb3c-aca8-49f6-b9d2-c2bb25584ac0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.728304] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db02ad43-71e1-491e-a740-d4322324f037 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.734847] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac3b0822-5adf-4560-85cb-39cc9b9a2f18 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.748553] env[61978]: DEBUG nova.compute.provider_tree [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1527.251537] env[61978]: DEBUG nova.scheduler.client.report [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1527.756379] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 
tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.139s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1527.756915] env[61978]: DEBUG nova.compute.manager [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1528.230619] env[61978]: DEBUG nova.network.neutron [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Updating instance_info_cache with network_info: [{"id": "6c4911ae-3f5e-46c7-9538-7e1a9811252e", "address": "fa:16:3e:c3:c6:fc", "network": {"id": "1200e62b-3b3a-40e1-98a2-c33a17573a38", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2039134840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8f40d19e7c74ade886c322a78583545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c4911ae-3f", "ovs_interfaceid": "6c4911ae-3f5e-46c7-9538-7e1a9811252e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.261968] env[61978]: DEBUG nova.compute.utils [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1528.263240] env[61978]: DEBUG nova.compute.manager [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1528.263412] env[61978]: DEBUG nova.network.neutron [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1528.300151] env[61978]: DEBUG nova.policy [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df950ca91cd64479950545608f749fb6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aaa4a0cb1a4c45949b43032fd9395200', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1528.544392] env[61978]: DEBUG nova.network.neutron [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Successfully created port: ff1be352-d287-4618-a8a0-bd1affb328ec {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1528.733854] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Releasing lock "refresh_cache-6a454083-8d85-4a29-98dc-29eb0a072560" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1528.734184] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Updated the network info_cache for instance {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1528.734412] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1528.734582] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1528.734766] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1528.734918] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1528.735078] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1528.735231] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1528.735362] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1528.735510] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1528.766274] env[61978]: DEBUG nova.compute.manager [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1529.238407] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.238719] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.238894] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.239067] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1529.240050] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c93144-5b95-478d-b672-20bc99c3f450 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.248575] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880f85f5-0e77-4697-8fd8-b8cdb355dc9d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.263111] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd1d9c50-eaa1-44ac-baec-d0ea92601062 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.269511] env[61978]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bba78d7-e3c8-4389-967a-5ad4a7589351 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.302388] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181304MB free_disk=185GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1529.302561] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.302759] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.779186] env[61978]: DEBUG nova.compute.manager [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1529.806961] env[61978]: DEBUG nova.virt.hardware [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1529.807236] env[61978]: DEBUG nova.virt.hardware [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1529.807403] env[61978]: DEBUG nova.virt.hardware [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1529.807679] env[61978]: DEBUG nova.virt.hardware [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1529.807843] env[61978]: DEBUG nova.virt.hardware [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1529.808014] env[61978]: DEBUG nova.virt.hardware [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1529.808235] env[61978]: DEBUG nova.virt.hardware [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1529.808467] env[61978]: DEBUG nova.virt.hardware [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1529.808829] env[61978]: DEBUG nova.virt.hardware [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1529.808829] env[61978]: DEBUG nova.virt.hardware [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1529.808940] env[61978]: DEBUG nova.virt.hardware [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1529.812556] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ad9724-3bcc-4a14-98cf-39c5307bd806 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.821249] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4be6c0c-4446-4341-98f1-dfb70a89edd7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.924350] env[61978]: DEBUG nova.compute.manager [req-8c26b1d6-ad3e-4ac4-b1b5-44b974858105 req-f48a7bf2-2907-467f-b1aa-f6755458518f service nova] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Received event network-vif-plugged-ff1be352-d287-4618-a8a0-bd1affb328ec {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1529.924587] env[61978]: DEBUG oslo_concurrency.lockutils [req-8c26b1d6-ad3e-4ac4-b1b5-44b974858105 req-f48a7bf2-2907-467f-b1aa-f6755458518f service nova] Acquiring lock "d587cf18-1558-4e01-be53-3b7bf8287fdd-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.924799] env[61978]: DEBUG oslo_concurrency.lockutils [req-8c26b1d6-ad3e-4ac4-b1b5-44b974858105 req-f48a7bf2-2907-467f-b1aa-f6755458518f service nova] Lock "d587cf18-1558-4e01-be53-3b7bf8287fdd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.924975] env[61978]: DEBUG oslo_concurrency.lockutils [req-8c26b1d6-ad3e-4ac4-b1b5-44b974858105 req-f48a7bf2-2907-467f-b1aa-f6755458518f service nova] Lock "d587cf18-1558-4e01-be53-3b7bf8287fdd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.925163] env[61978]: DEBUG nova.compute.manager [req-8c26b1d6-ad3e-4ac4-b1b5-44b974858105 req-f48a7bf2-2907-467f-b1aa-f6755458518f service nova] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] No waiting events found dispatching network-vif-plugged-ff1be352-d287-4618-a8a0-bd1affb328ec {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1529.925334] env[61978]: WARNING nova.compute.manager [req-8c26b1d6-ad3e-4ac4-b1b5-44b974858105 req-f48a7bf2-2907-467f-b1aa-f6755458518f service nova] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Received unexpected event network-vif-plugged-ff1be352-d287-4618-a8a0-bd1affb328ec for instance with vm_state building and task_state spawning. [ 1530.015210] env[61978]: DEBUG nova.network.neutron [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Successfully updated port: ff1be352-d287-4618-a8a0-bd1affb328ec {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1530.336650] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance a6f73332-d0a5-4c52-8e38-8982e42ee62f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1530.336806] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 733c2f53-04d3-4a8b-a7c1-5194d7961a31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1530.336931] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 77a8cde0-b046-4970-9979-9d4b85a224e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1530.337062] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 6a454083-8d85-4a29-98dc-29eb0a072560 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1530.337181] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance d587cf18-1558-4e01-be53-3b7bf8287fdd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1530.337365] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1530.337508] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1530.401109] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef84f74-3759-4bac-a88d-c6db2be5c60b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.410011] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-308859f0-4bb3-4157-8d0a-8e8c3e1be3d7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.438821] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198ae7c3-44f8-41bb-91ad-98c5a26ef6b1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.445769] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa1d44d8-085d-4b3e-a43d-e9ebb8cf29f4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.458059] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "77a8cde0-b046-4970-9979-9d4b85a224e2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.458577] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "77a8cde0-b046-4970-9979-9d4b85a224e2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.458577] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "77a8cde0-b046-4970-9979-9d4b85a224e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.458712] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "77a8cde0-b046-4970-9979-9d4b85a224e2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.458867] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "77a8cde0-b046-4970-9979-9d4b85a224e2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.460585] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1530.461902] env[61978]: INFO nova.compute.manager [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Terminating instance [ 1530.465741] env[61978]: DEBUG nova.compute.manager [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1530.465943] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1530.466669] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df323cac-f855-434f-979c-74a3527e0d8f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.473457] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1530.473679] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2038738a-ef95-4506-8cfd-b13a0d0f3d62 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.479880] env[61978]: DEBUG oslo_vmware.api [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1530.479880] env[61978]: value = "task-1396199" [ 1530.479880] env[61978]: _type = "Task" [ 1530.479880] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.487714] env[61978]: DEBUG oslo_vmware.api [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396199, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.516880] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "refresh_cache-d587cf18-1558-4e01-be53-3b7bf8287fdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.517043] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired lock "refresh_cache-d587cf18-1558-4e01-be53-3b7bf8287fdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.517198] env[61978]: DEBUG nova.network.neutron [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1530.964720] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1530.990441] env[61978]: DEBUG oslo_vmware.api [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396199, 'name': PowerOffVM_Task, 'duration_secs': 0.218792} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.990697] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1530.990874] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1530.991131] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef4f2530-731e-41b6-a4f9-5a33ae48aa6e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.048422] env[61978]: DEBUG nova.network.neutron [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1531.054403] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1531.054633] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1531.055310] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Deleting the datastore file [datastore2] 77a8cde0-b046-4970-9979-9d4b85a224e2 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1531.055310] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5236a52-c564-4472-bf99-f5df4ffe2bc8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.060933] env[61978]: DEBUG oslo_vmware.api [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1531.060933] env[61978]: value = "task-1396201" [ 1531.060933] env[61978]: _type = "Task" [ 1531.060933] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.070561] env[61978]: DEBUG oslo_vmware.api [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396201, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.191718] env[61978]: DEBUG nova.network.neutron [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Updating instance_info_cache with network_info: [{"id": "ff1be352-d287-4618-a8a0-bd1affb328ec", "address": "fa:16:3e:ae:8a:da", "network": {"id": "a08d803f-f140-462d-a335-21fe5ce33ff5", "bridge": "br-int", "label": "tempest-ServersTestJSON-1636313508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaa4a0cb1a4c45949b43032fd9395200", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff1be352-d2", "ovs_interfaceid": "ff1be352-d287-4618-a8a0-bd1affb328ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1531.468886] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1531.469102] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.166s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.571014] env[61978]: DEBUG oslo_vmware.api [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396201, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140842} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.571300] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1531.571490] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1531.571730] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1531.571858] env[61978]: INFO nova.compute.manager [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1531.572115] env[61978]: DEBUG oslo.service.loopingcall [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1531.572324] env[61978]: DEBUG nova.compute.manager [-] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1531.572417] env[61978]: DEBUG nova.network.neutron [-] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1531.694488] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Releasing lock "refresh_cache-d587cf18-1558-4e01-be53-3b7bf8287fdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1531.694822] env[61978]: DEBUG nova.compute.manager [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Instance network_info: |[{"id": "ff1be352-d287-4618-a8a0-bd1affb328ec", "address": "fa:16:3e:ae:8a:da", "network": {"id": "a08d803f-f140-462d-a335-21fe5ce33ff5", "bridge": "br-int", "label": "tempest-ServersTestJSON-1636313508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaa4a0cb1a4c45949b43032fd9395200", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff1be352-d2", "ovs_interfaceid": "ff1be352-d287-4618-a8a0-bd1affb328ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1531.695287] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:8a:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb68953b-dee5-4d9d-b47b-277336ba76dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff1be352-d287-4618-a8a0-bd1affb328ec', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1531.702845] env[61978]: DEBUG oslo.service.loopingcall [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1531.703084] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1531.703316] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4b403ae0-0c21-4f6f-9527-6de475abb268 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.722807] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1531.722807] env[61978]: value = "task-1396202" [ 1531.722807] env[61978]: _type = "Task" [ 1531.722807] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.730042] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396202, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.949946] env[61978]: DEBUG nova.compute.manager [req-410f363a-8799-4c77-9249-6268a56ef98b req-81569248-27df-47f7-9615-1f131e606b35 service nova] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Received event network-changed-ff1be352-d287-4618-a8a0-bd1affb328ec {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1531.950059] env[61978]: DEBUG nova.compute.manager [req-410f363a-8799-4c77-9249-6268a56ef98b req-81569248-27df-47f7-9615-1f131e606b35 service nova] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Refreshing instance network info cache due to event network-changed-ff1be352-d287-4618-a8a0-bd1affb328ec. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1531.950295] env[61978]: DEBUG oslo_concurrency.lockutils [req-410f363a-8799-4c77-9249-6268a56ef98b req-81569248-27df-47f7-9615-1f131e606b35 service nova] Acquiring lock "refresh_cache-d587cf18-1558-4e01-be53-3b7bf8287fdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1531.950453] env[61978]: DEBUG oslo_concurrency.lockutils [req-410f363a-8799-4c77-9249-6268a56ef98b req-81569248-27df-47f7-9615-1f131e606b35 service nova] Acquired lock "refresh_cache-d587cf18-1558-4e01-be53-3b7bf8287fdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1531.950642] env[61978]: DEBUG nova.network.neutron [req-410f363a-8799-4c77-9249-6268a56ef98b req-81569248-27df-47f7-9615-1f131e606b35 service nova] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Refreshing network info cache for port ff1be352-d287-4618-a8a0-bd1affb328ec {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1532.232816] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396202, 'name': CreateVM_Task, 'duration_secs': 0.305476} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.233129] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1532.234626] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1532.234626] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1532.234626] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1532.234626] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a918ff88-bc08-4d4d-99c0-ed8043476021 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.239151] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1532.239151] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a8f3a0-3517-2f5a-c12c-cbf559dc4992" [ 1532.239151] env[61978]: _type = "Task" [ 1532.239151] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.247445] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a8f3a0-3517-2f5a-c12c-cbf559dc4992, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.503550] env[61978]: DEBUG nova.network.neutron [-] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1532.660565] env[61978]: DEBUG nova.network.neutron [req-410f363a-8799-4c77-9249-6268a56ef98b req-81569248-27df-47f7-9615-1f131e606b35 service nova] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Updated VIF entry in instance network info cache for port ff1be352-d287-4618-a8a0-bd1affb328ec. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1532.660924] env[61978]: DEBUG nova.network.neutron [req-410f363a-8799-4c77-9249-6268a56ef98b req-81569248-27df-47f7-9615-1f131e606b35 service nova] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Updating instance_info_cache with network_info: [{"id": "ff1be352-d287-4618-a8a0-bd1affb328ec", "address": "fa:16:3e:ae:8a:da", "network": {"id": "a08d803f-f140-462d-a335-21fe5ce33ff5", "bridge": "br-int", "label": "tempest-ServersTestJSON-1636313508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaa4a0cb1a4c45949b43032fd9395200", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff1be352-d2", "ovs_interfaceid": "ff1be352-d287-4618-a8a0-bd1affb328ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1532.749661] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a8f3a0-3517-2f5a-c12c-cbf559dc4992, 'name': SearchDatastore_Task, 'duration_secs': 0.010187} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.749926] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1532.750178] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1532.750423] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1532.750575] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1532.750756] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1532.751049] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-094c8a69-979c-419f-ada7-8a8d484c85dd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.759131] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1532.759312] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1532.759994] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47ae2a2e-c92a-48ea-b643-1e081cef83d9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.764818] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1532.764818] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52eea804-e923-b0bf-b1e5-b7d9b82a65e6" [ 1532.764818] env[61978]: _type = "Task" [ 1532.764818] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.772049] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52eea804-e923-b0bf-b1e5-b7d9b82a65e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.006677] env[61978]: INFO nova.compute.manager [-] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Took 1.43 seconds to deallocate network for instance. [ 1533.166398] env[61978]: DEBUG oslo_concurrency.lockutils [req-410f363a-8799-4c77-9249-6268a56ef98b req-81569248-27df-47f7-9615-1f131e606b35 service nova] Releasing lock "refresh_cache-d587cf18-1558-4e01-be53-3b7bf8287fdd" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1533.276051] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52eea804-e923-b0bf-b1e5-b7d9b82a65e6, 'name': SearchDatastore_Task, 'duration_secs': 0.008923} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.276854] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d1e7bc6-8912-416f-bf6a-75e2bdad7afc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.281680] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1533.281680] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5266c527-63b0-0055-6c9d-b8114f570fbc" [ 1533.281680] env[61978]: _type = "Task" [ 1533.281680] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.288823] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5266c527-63b0-0055-6c9d-b8114f570fbc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.513747] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1533.513981] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1533.514241] env[61978]: DEBUG nova.objects.instance [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lazy-loading 'resources' on Instance uuid 77a8cde0-b046-4970-9979-9d4b85a224e2 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1533.792059] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5266c527-63b0-0055-6c9d-b8114f570fbc, 'name': SearchDatastore_Task, 'duration_secs': 0.009496} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.792059] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1533.792059] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] d587cf18-1558-4e01-be53-3b7bf8287fdd/d587cf18-1558-4e01-be53-3b7bf8287fdd.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1533.792438] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48d1ca17-ced0-4236-9a7b-6709353ed075 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.798225] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1533.798225] env[61978]: value = "task-1396203" [ 1533.798225] env[61978]: _type = "Task" [ 1533.798225] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.805409] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396203, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.978198] env[61978]: DEBUG nova.compute.manager [req-4050a313-f3b0-4571-b08e-32c67e22c19a req-57a6e777-922d-4c5e-bbc5-f41281769bf3 service nova] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Received event network-vif-deleted-d8c08c29-7a03-4668-9677-a2fd2c6b39eb {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1534.103746] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b162838-b588-4fed-bd96-cdb9ff1afc3c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.112367] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324790ff-1e1e-4159-bf51-7b162aee3fe0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.143346] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8793b701-2e73-4030-bc79-de3e8519d840 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.151251] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f528e24e-f5cb-4abd-b9e8-1e992eea5583 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.166191] env[61978]: DEBUG nova.compute.provider_tree [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1534.308305] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396203, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462126} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.308683] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] d587cf18-1558-4e01-be53-3b7bf8287fdd/d587cf18-1558-4e01-be53-3b7bf8287fdd.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1534.308762] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1534.308999] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e889178-e443-47fa-9718-09fa91fa25a4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.315590] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1534.315590] env[61978]: value = "task-1396204" [ 1534.315590] env[61978]: _type = "Task" [ 1534.315590] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.322893] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396204, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.669337] env[61978]: DEBUG nova.scheduler.client.report [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1534.824627] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396204, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073765} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.824877] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1534.825637] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb0212dd-bd41-45f8-9bc2-1a7321182d46 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.846473] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] d587cf18-1558-4e01-be53-3b7bf8287fdd/d587cf18-1558-4e01-be53-3b7bf8287fdd.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1534.846726] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd8321e3-6d96-49ec-b360-8d4ab7e0ac8d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.865144] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1534.865144] env[61978]: value = "task-1396205" [ 1534.865144] env[61978]: _type = "Task" [ 1534.865144] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.872448] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396205, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.173868] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.660s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.191744] env[61978]: INFO nova.scheduler.client.report [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Deleted allocations for instance 77a8cde0-b046-4970-9979-9d4b85a224e2 [ 1535.375209] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396205, 'name': ReconfigVM_Task, 'duration_secs': 0.270419} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.375598] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Reconfigured VM instance instance-00000078 to attach disk [datastore2] d587cf18-1558-4e01-be53-3b7bf8287fdd/d587cf18-1558-4e01-be53-3b7bf8287fdd.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1535.376944] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d276dc2-8f12-49f8-b551-a2540048d601 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.382203] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1535.382203] env[61978]: value = "task-1396206" [ 1535.382203] env[61978]: _type = "Task" [ 1535.382203] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.389842] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396206, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.699480] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6929fa53-8d61-4241-8ed5-6d24416c2d18 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "77a8cde0-b046-4970-9979-9d4b85a224e2" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.241s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.892964] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396206, 'name': Rename_Task, 'duration_secs': 0.129091} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.893272] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1535.893531] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bf794822-d0fa-4781-877c-c6c341c2e61f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.899832] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1535.899832] env[61978]: value = "task-1396207" [ 1535.899832] env[61978]: _type = "Task" [ 1535.899832] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.906583] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396207, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.334991] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3755d5ae-da4c-48b4-a455-8661f41d9c2f tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "a6f73332-d0a5-4c52-8e38-8982e42ee62f" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.335294] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3755d5ae-da4c-48b4-a455-8661f41d9c2f tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a6f73332-d0a5-4c52-8e38-8982e42ee62f" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.409301] env[61978]: DEBUG oslo_vmware.api [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396207, 'name': PowerOnVM_Task, 'duration_secs': 0.448153} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.409841] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1536.410064] env[61978]: INFO nova.compute.manager [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Took 6.63 seconds to spawn the instance on the hypervisor. 
[ 1536.410253] env[61978]: DEBUG nova.compute.manager [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1536.410987] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d466eaa-b9ec-4e41-8d61-a61c9718dcd0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.838169] env[61978]: INFO nova.compute.manager [None req-3755d5ae-da4c-48b4-a455-8661f41d9c2f tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Detaching volume a4d41e66-b964-4608-8f3d-36e9fc95a5ef [ 1536.870332] env[61978]: INFO nova.virt.block_device [None req-3755d5ae-da4c-48b4-a455-8661f41d9c2f tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Attempting to driver detach volume a4d41e66-b964-4608-8f3d-36e9fc95a5ef from mountpoint /dev/sdb [ 1536.870533] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-3755d5ae-da4c-48b4-a455-8661f41d9c2f tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Volume detach. Driver type: vmdk {{(pid=61978) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1536.870731] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-3755d5ae-da4c-48b4-a455-8661f41d9c2f tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296076', 'volume_id': 'a4d41e66-b964-4608-8f3d-36e9fc95a5ef', 'name': 'volume-a4d41e66-b964-4608-8f3d-36e9fc95a5ef', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a6f73332-d0a5-4c52-8e38-8982e42ee62f', 'attached_at': '', 'detached_at': '', 'volume_id': 'a4d41e66-b964-4608-8f3d-36e9fc95a5ef', 'serial': 'a4d41e66-b964-4608-8f3d-36e9fc95a5ef'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1536.871642] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e3db714-3fcd-457e-8f69-4010d87b2c78 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.893365] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bbf30c3-3d60-465b-8de2-2174cb521e97 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.900548] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223cd59e-82d7-4d0d-b06a-6f00db22c70f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.920009] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f395380-8c53-4cb3-aff8-d601bc481e74 {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.928045] env[61978]: INFO nova.compute.manager [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Took 11.33 seconds to build instance. [ 1536.938574] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-3755d5ae-da4c-48b4-a455-8661f41d9c2f tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] The volume has not been displaced from its original location: [datastore2] volume-a4d41e66-b964-4608-8f3d-36e9fc95a5ef/volume-a4d41e66-b964-4608-8f3d-36e9fc95a5ef.vmdk. No consolidation needed. {{(pid=61978) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1536.943797] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-3755d5ae-da4c-48b4-a455-8661f41d9c2f tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Reconfiguring VM instance instance-00000072 to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1536.944746] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6d63448-551e-43a7-9627-38d4c10657f3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.963548] env[61978]: DEBUG oslo_vmware.api [None req-3755d5ae-da4c-48b4-a455-8661f41d9c2f tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1536.963548] env[61978]: value = "task-1396208" [ 1536.963548] env[61978]: _type = "Task" [ 1536.963548] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.974519] env[61978]: DEBUG oslo_vmware.api [None req-3755d5ae-da4c-48b4-a455-8661f41d9c2f tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396208, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.445058] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6491f5c8-372a-47ca-94f1-eb336c3b5933 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "d587cf18-1558-4e01-be53-3b7bf8287fdd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 12.853s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.473552] env[61978]: DEBUG oslo_vmware.api [None req-3755d5ae-da4c-48b4-a455-8661f41d9c2f tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396208, 'name': ReconfigVM_Task, 'duration_secs': 0.30459} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.473823] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-3755d5ae-da4c-48b4-a455-8661f41d9c2f tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Reconfigured VM instance instance-00000072 to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1537.479539] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b3e3c9b-caaa-4a60-a1c9-6a19841bebcd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.495823] env[61978]: DEBUG oslo_vmware.api [None req-3755d5ae-da4c-48b4-a455-8661f41d9c2f tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1537.495823] env[61978]: value = "task-1396209" [ 1537.495823] env[61978]: _type = "Task" [ 1537.495823] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.503670] env[61978]: DEBUG oslo_vmware.api [None req-3755d5ae-da4c-48b4-a455-8661f41d9c2f tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396209, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.006092] env[61978]: DEBUG oslo_vmware.api [None req-3755d5ae-da4c-48b4-a455-8661f41d9c2f tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396209, 'name': ReconfigVM_Task, 'duration_secs': 0.149577} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.006432] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-3755d5ae-da4c-48b4-a455-8661f41d9c2f tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296076', 'volume_id': 'a4d41e66-b964-4608-8f3d-36e9fc95a5ef', 'name': 'volume-a4d41e66-b964-4608-8f3d-36e9fc95a5ef', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a6f73332-d0a5-4c52-8e38-8982e42ee62f', 'attached_at': '', 'detached_at': '', 'volume_id': 'a4d41e66-b964-4608-8f3d-36e9fc95a5ef', 'serial': 'a4d41e66-b964-4608-8f3d-36e9fc95a5ef'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1538.086333] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "bc2d4609-bb75-48e7-859b-7cbb02041f52" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1538.086333] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "bc2d4609-bb75-48e7-859b-7cbb02041f52" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1538.548788] env[61978]: DEBUG nova.objects.instance [None req-3755d5ae-da4c-48b4-a455-8661f41d9c2f tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lazy-loading 'flavor' on Instance uuid a6f73332-d0a5-4c52-8e38-8982e42ee62f {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1538.588210] env[61978]: DEBUG nova.compute.manager [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1539.110092] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.110379] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.111926] env[61978]: INFO nova.compute.claims [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1539.556644] env[61978]: DEBUG oslo_concurrency.lockutils [None req-3755d5ae-da4c-48b4-a455-8661f41d9c2f tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a6f73332-d0a5-4c52-8e38-8982e42ee62f" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.221s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.187545] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597050d0-d533-40c7-b600-4aae1c9b2afe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.194985] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30725198-2627-4d56-bec6-7c393fe9ea46 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.224317] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13f9c1f-1707-4dd3-a40d-ad9256a35607 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.235160] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b7d6ef-c23a-464b-9b3a-42045af071f4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.248616] env[61978]: DEBUG nova.compute.provider_tree [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1540.570992] env[61978]: DEBUG oslo_concurrency.lockutils [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "a6f73332-d0a5-4c52-8e38-8982e42ee62f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1540.571377] env[61978]: DEBUG oslo_concurrency.lockutils [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a6f73332-d0a5-4c52-8e38-8982e42ee62f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.571718] env[61978]: DEBUG oslo_concurrency.lockutils [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "a6f73332-d0a5-4c52-8e38-8982e42ee62f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1540.572032] env[61978]: DEBUG oslo_concurrency.lockutils [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a6f73332-d0a5-4c52-8e38-8982e42ee62f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.572321] env[61978]: DEBUG oslo_concurrency.lockutils [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a6f73332-d0a5-4c52-8e38-8982e42ee62f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.575742] env[61978]: INFO nova.compute.manager [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Terminating instance [ 1540.578213] env[61978]: DEBUG nova.compute.manager [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1540.578531] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1540.579769] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3c896f-cfc3-487f-b9d3-8ee2e8212ecf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.589268] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1540.589543] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c986b432-3e06-4cfc-9341-28679219877e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.596478] env[61978]: DEBUG oslo_vmware.api [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1540.596478] env[61978]: value = "task-1396210" [ 1540.596478] env[61978]: _type = "Task" [ 1540.596478] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.605568] env[61978]: DEBUG oslo_vmware.api [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396210, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.752400] env[61978]: DEBUG nova.scheduler.client.report [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1541.106201] env[61978]: DEBUG oslo_vmware.api [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396210, 'name': PowerOffVM_Task, 'duration_secs': 0.18672} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.106657] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1541.106946] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1541.107372] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c3b714b0-a228-453b-b653-2640d245cdab {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.257803] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.147s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1541.258376] env[61978]: DEBUG nova.compute.manager [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1541.269118] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1541.269342] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1541.269539] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Deleting the datastore file [datastore2] a6f73332-d0a5-4c52-8e38-8982e42ee62f {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1541.269814] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3bd33561-5eac-4c40-a86c-1f4edbd9008b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.276170] env[61978]: DEBUG oslo_vmware.api [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1541.276170] env[61978]: value = "task-1396212" [ 1541.276170] env[61978]: _type = "Task" [ 1541.276170] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.283599] env[61978]: DEBUG oslo_vmware.api [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396212, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.763284] env[61978]: DEBUG nova.compute.utils [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1541.764724] env[61978]: DEBUG nova.compute.manager [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1541.764895] env[61978]: DEBUG nova.network.neutron [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1541.786887] env[61978]: DEBUG oslo_vmware.api [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396212, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132892} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.787098] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1541.787288] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1541.787470] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1541.787645] env[61978]: INFO nova.compute.manager [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1541.787885] env[61978]: DEBUG oslo.service.loopingcall [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1541.788092] env[61978]: DEBUG nova.compute.manager [-] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1541.788179] env[61978]: DEBUG nova.network.neutron [-] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1541.818375] env[61978]: DEBUG nova.policy [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df950ca91cd64479950545608f749fb6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aaa4a0cb1a4c45949b43032fd9395200', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1542.254767] env[61978]: DEBUG nova.network.neutron [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Successfully created port: 1e5b6479-cb7d-422f-ab6e-a24f660960ce {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1542.267445] env[61978]: DEBUG nova.compute.manager [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1542.728208] env[61978]: DEBUG nova.compute.manager [req-94c3eb30-4504-4e32-8dba-b452027d6c38 req-6e52c7a5-09f4-472e-a86f-6b90dcbd4206 service nova] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Received event network-vif-deleted-204d4c58-f413-4204-b406-205812a3832d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1542.728836] env[61978]: INFO nova.compute.manager [req-94c3eb30-4504-4e32-8dba-b452027d6c38 req-6e52c7a5-09f4-472e-a86f-6b90dcbd4206 service nova] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Neutron deleted interface 204d4c58-f413-4204-b406-205812a3832d; detaching it from the instance and deleting it from the info cache [ 1542.728836] env[61978]: DEBUG nova.network.neutron [req-94c3eb30-4504-4e32-8dba-b452027d6c38 req-6e52c7a5-09f4-472e-a86f-6b90dcbd4206 service nova] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1543.206024] env[61978]: DEBUG nova.network.neutron [-] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1543.231936] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ceeec446-a3a9-43b8-bca9-188e455f73ce {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.241957] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc9f342-9f81-4e20-88aa-cf3507b4b534 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.267533] env[61978]: DEBUG nova.compute.manager [req-94c3eb30-4504-4e32-8dba-b452027d6c38 req-6e52c7a5-09f4-472e-a86f-6b90dcbd4206 service nova] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Detach interface failed, port_id=204d4c58-f413-4204-b406-205812a3832d, reason: Instance a6f73332-d0a5-4c52-8e38-8982e42ee62f could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1543.277608] env[61978]: DEBUG nova.compute.manager [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1543.307451] env[61978]: DEBUG nova.virt.hardware [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1543.307729] env[61978]: DEBUG nova.virt.hardware [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1543.307895] env[61978]: DEBUG nova.virt.hardware [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1543.308101] env[61978]: DEBUG nova.virt.hardware [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1543.308262] env[61978]: DEBUG nova.virt.hardware [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1543.308416] env[61978]: DEBUG nova.virt.hardware [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1543.308666] env[61978]: DEBUG nova.virt.hardware [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1543.308857] env[61978]: DEBUG nova.virt.hardware [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1543.309048] env[61978]: DEBUG nova.virt.hardware [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 
tempest-ServersTestJSON-1559277588-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1543.309224] env[61978]: DEBUG nova.virt.hardware [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1543.309403] env[61978]: DEBUG nova.virt.hardware [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1543.310398] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd148496-d5b6-4f95-bb77-b35962da6e19 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.318128] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab28c184-af3c-4e2d-a95e-a8d5755c7589 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.708271] env[61978]: INFO nova.compute.manager [-] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Took 1.92 seconds to deallocate network for instance. [ 1544.048822] env[61978]: DEBUG nova.network.neutron [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Successfully updated port: 1e5b6479-cb7d-422f-ab6e-a24f660960ce {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1544.214668] env[61978]: DEBUG oslo_concurrency.lockutils [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1544.215022] env[61978]: DEBUG oslo_concurrency.lockutils [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.215190] env[61978]: DEBUG nova.objects.instance [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lazy-loading 'resources' on Instance uuid a6f73332-d0a5-4c52-8e38-8982e42ee62f {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1544.551953] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "refresh_cache-bc2d4609-bb75-48e7-859b-7cbb02041f52" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1544.552156] env[61978]: DEBUG 
oslo_concurrency.lockutils [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired lock "refresh_cache-bc2d4609-bb75-48e7-859b-7cbb02041f52" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1544.552339] env[61978]: DEBUG nova.network.neutron [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1544.754398] env[61978]: DEBUG nova.compute.manager [req-74301bc3-d3f8-4fda-9f7a-172a01018d23 req-79031362-cd86-4a76-be36-f2edf3b64cd6 service nova] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Received event network-vif-plugged-1e5b6479-cb7d-422f-ab6e-a24f660960ce {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1544.754667] env[61978]: DEBUG oslo_concurrency.lockutils [req-74301bc3-d3f8-4fda-9f7a-172a01018d23 req-79031362-cd86-4a76-be36-f2edf3b64cd6 service nova] Acquiring lock "bc2d4609-bb75-48e7-859b-7cbb02041f52-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1544.754857] env[61978]: DEBUG oslo_concurrency.lockutils [req-74301bc3-d3f8-4fda-9f7a-172a01018d23 req-79031362-cd86-4a76-be36-f2edf3b64cd6 service nova] Lock "bc2d4609-bb75-48e7-859b-7cbb02041f52-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.755011] env[61978]: DEBUG oslo_concurrency.lockutils [req-74301bc3-d3f8-4fda-9f7a-172a01018d23 req-79031362-cd86-4a76-be36-f2edf3b64cd6 service nova] Lock "bc2d4609-bb75-48e7-859b-7cbb02041f52-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.755200] env[61978]: DEBUG nova.compute.manager [req-74301bc3-d3f8-4fda-9f7a-172a01018d23 req-79031362-cd86-4a76-be36-f2edf3b64cd6 service nova] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] No waiting events found dispatching network-vif-plugged-1e5b6479-cb7d-422f-ab6e-a24f660960ce {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1544.755370] env[61978]: WARNING nova.compute.manager [req-74301bc3-d3f8-4fda-9f7a-172a01018d23 req-79031362-cd86-4a76-be36-f2edf3b64cd6 service nova] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Received unexpected event network-vif-plugged-1e5b6479-cb7d-422f-ab6e-a24f660960ce for instance with vm_state building and task_state spawning. 
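Note: the WARNING just above ("Received unexpected event network-vif-plugged-1e5b6479-cb7d-422f-ab6e-a24f660960ce for instance with vm_state building and task_state spawning") is the usual waiter/dispatch race during a build: the compute manager normally registers the named event it expects before triggering the action, and the external-event handler pops and signals that waiter under a per-instance lock; if the Neutron notification arrives before a waiter was registered, dispatch finds nothing and logs "No waiting events found". The sketch below is a deliberately simplified, hypothetical illustration of that pattern only; the class and method names are illustrative and are not Nova's actual InstanceEvents implementation.

# Minimal sketch (not Nova's real code) of the event waiter/dispatch pattern
# behind the "No waiting events found dispatching ..." lines above.
import logging
import threading

LOG = logging.getLogger(__name__)


class InstanceEventRegistry:
    def __init__(self):
        self._lock = threading.Lock()
        self._events = {}  # {instance_uuid: {event_name: threading.Event}}

    def prepare(self, instance_uuid, event_name):
        # Register interest in an event before triggering the external action.
        waiter = threading.Event()
        with self._lock:
            self._events.setdefault(instance_uuid, {})[event_name] = waiter
        return waiter

    def dispatch(self, instance_uuid, event_name):
        # Called from the external-event path (e.g. a Neutron notification).
        with self._lock:
            waiter = self._events.get(instance_uuid, {}).pop(event_name, None)
        if waiter is None:
            # Mirrors the WARNING in the log: the event arrived while nothing
            # was waiting for it yet.
            LOG.warning("No waiting events found dispatching %s for %s",
                        event_name, instance_uuid)
            return False
        waiter.set()
        return True


# Usage sketch: dispatch racing ahead of prepare() produces the warning path.
registry = InstanceEventRegistry()
registry.dispatch("bc2d4609-bb75-48e7-859b-7cbb02041f52",
                  "network-vif-plugged-1e5b6479-cb7d-422f-ab6e-a24f660960ce")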
[ 1544.755537] env[61978]: DEBUG nova.compute.manager [req-74301bc3-d3f8-4fda-9f7a-172a01018d23 req-79031362-cd86-4a76-be36-f2edf3b64cd6 service nova] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Received event network-changed-1e5b6479-cb7d-422f-ab6e-a24f660960ce {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1544.755707] env[61978]: DEBUG nova.compute.manager [req-74301bc3-d3f8-4fda-9f7a-172a01018d23 req-79031362-cd86-4a76-be36-f2edf3b64cd6 service nova] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Refreshing instance network info cache due to event network-changed-1e5b6479-cb7d-422f-ab6e-a24f660960ce. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1544.755879] env[61978]: DEBUG oslo_concurrency.lockutils [req-74301bc3-d3f8-4fda-9f7a-172a01018d23 req-79031362-cd86-4a76-be36-f2edf3b64cd6 service nova] Acquiring lock "refresh_cache-bc2d4609-bb75-48e7-859b-7cbb02041f52" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1544.797604] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c581ebff-49a0-4549-a1ba-30080786203d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.805568] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43123542-fb1d-4626-aab0-b6043c807315 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.835736] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c2a616-5285-44a0-b7d2-ab858ce48ec3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.843133] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478cfbbc-d8e8-4b92-949c-0f9348527548 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.858152] env[61978]: DEBUG nova.compute.provider_tree [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1545.085558] env[61978]: DEBUG nova.network.neutron [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1545.232897] env[61978]: DEBUG nova.network.neutron [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Updating instance_info_cache with network_info: [{"id": "1e5b6479-cb7d-422f-ab6e-a24f660960ce", "address": "fa:16:3e:0c:64:12", "network": {"id": "a08d803f-f140-462d-a335-21fe5ce33ff5", "bridge": "br-int", "label": "tempest-ServersTestJSON-1636313508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaa4a0cb1a4c45949b43032fd9395200", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e5b6479-cb", "ovs_interfaceid": "1e5b6479-cb7d-422f-ab6e-a24f660960ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1545.361434] env[61978]: DEBUG nova.scheduler.client.report [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1545.735487] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Releasing lock "refresh_cache-bc2d4609-bb75-48e7-859b-7cbb02041f52" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1545.735815] env[61978]: DEBUG nova.compute.manager [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Instance network_info: |[{"id": "1e5b6479-cb7d-422f-ab6e-a24f660960ce", "address": "fa:16:3e:0c:64:12", "network": {"id": "a08d803f-f140-462d-a335-21fe5ce33ff5", "bridge": "br-int", "label": "tempest-ServersTestJSON-1636313508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaa4a0cb1a4c45949b43032fd9395200", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e5b6479-cb", "ovs_interfaceid": "1e5b6479-cb7d-422f-ab6e-a24f660960ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1545.736197] env[61978]: DEBUG oslo_concurrency.lockutils [req-74301bc3-d3f8-4fda-9f7a-172a01018d23 req-79031362-cd86-4a76-be36-f2edf3b64cd6 service nova] Acquired lock "refresh_cache-bc2d4609-bb75-48e7-859b-7cbb02041f52" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.736411] env[61978]: DEBUG nova.network.neutron [req-74301bc3-d3f8-4fda-9f7a-172a01018d23 req-79031362-cd86-4a76-be36-f2edf3b64cd6 service nova] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Refreshing network info cache for port 1e5b6479-cb7d-422f-ab6e-a24f660960ce {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1545.738052] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:64:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb68953b-dee5-4d9d-b47b-277336ba76dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e5b6479-cb7d-422f-ab6e-a24f660960ce', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1545.745730] env[61978]: DEBUG oslo.service.loopingcall [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1545.746783] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1545.747043] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c1527a4-92f3-4ffc-af6b-674130ee2cb0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.768282] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1545.768282] env[61978]: value = "task-1396213" [ 1545.768282] env[61978]: _type = "Task" [ 1545.768282] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.776241] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396213, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.867271] env[61978]: DEBUG oslo_concurrency.lockutils [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.652s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.889365] env[61978]: INFO nova.scheduler.client.report [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Deleted allocations for instance a6f73332-d0a5-4c52-8e38-8982e42ee62f [ 1546.279517] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396213, 'name': CreateVM_Task, 'duration_secs': 0.342698} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.279802] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1546.280429] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.280612] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.280940] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1546.281209] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2830cf04-b357-4f66-aeca-9fb80a20eb13 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.285590] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1546.285590] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5228efc8-2c61-5785-64c9-14781ceef0da" [ 1546.285590] env[61978]: _type = "Task" [ 1546.285590] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.293246] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5228efc8-2c61-5785-64c9-14781ceef0da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.396502] env[61978]: DEBUG oslo_concurrency.lockutils [None req-34812f35-e86d-4335-b35b-0df60a4b6132 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a6f73332-d0a5-4c52-8e38-8982e42ee62f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.825s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1546.447649] env[61978]: DEBUG nova.network.neutron [req-74301bc3-d3f8-4fda-9f7a-172a01018d23 req-79031362-cd86-4a76-be36-f2edf3b64cd6 service nova] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Updated VIF entry in instance network info cache for port 1e5b6479-cb7d-422f-ab6e-a24f660960ce. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1546.448040] env[61978]: DEBUG nova.network.neutron [req-74301bc3-d3f8-4fda-9f7a-172a01018d23 req-79031362-cd86-4a76-be36-f2edf3b64cd6 service nova] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Updating instance_info_cache with network_info: [{"id": "1e5b6479-cb7d-422f-ab6e-a24f660960ce", "address": "fa:16:3e:0c:64:12", "network": {"id": "a08d803f-f140-462d-a335-21fe5ce33ff5", "bridge": "br-int", "label": "tempest-ServersTestJSON-1636313508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaa4a0cb1a4c45949b43032fd9395200", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e5b6479-cb", "ovs_interfaceid": "1e5b6479-cb7d-422f-ab6e-a24f660960ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.795575] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5228efc8-2c61-5785-64c9-14781ceef0da, 'name': SearchDatastore_Task, 'duration_secs': 0.012424} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.795889] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.796144] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1546.796388] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.796542] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.796720] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1546.796976] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-50e359bf-8e99-4f8c-ab40-8a0ec44d85a4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.805261] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1546.805441] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1546.806161] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8730994-8d51-4c8e-b486-0212b5d88e91 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.811053] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1546.811053] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52311d61-99f2-5b1c-d90f-46a48c90c2b0" [ 1546.811053] env[61978]: _type = "Task" [ 1546.811053] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.819631] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52311d61-99f2-5b1c-d90f-46a48c90c2b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.950524] env[61978]: DEBUG oslo_concurrency.lockutils [req-74301bc3-d3f8-4fda-9f7a-172a01018d23 req-79031362-cd86-4a76-be36-f2edf3b64cd6 service nova] Releasing lock "refresh_cache-bc2d4609-bb75-48e7-859b-7cbb02041f52" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.321869] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52311d61-99f2-5b1c-d90f-46a48c90c2b0, 'name': SearchDatastore_Task, 'duration_secs': 0.009894} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.322885] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea923d16-3724-4cd6-a2bb-ce7b061d26e5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.328037] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1547.328037] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52987856-5f44-7a0c-e52d-3c5ddc05af46" [ 1547.328037] env[61978]: _type = "Task" [ 1547.328037] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.337251] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52987856-5f44-7a0c-e52d-3c5ddc05af46, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.838948] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52987856-5f44-7a0c-e52d-3c5ddc05af46, 'name': SearchDatastore_Task, 'duration_secs': 0.009521} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.839306] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.839581] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] bc2d4609-bb75-48e7-859b-7cbb02041f52/bc2d4609-bb75-48e7-859b-7cbb02041f52.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1547.839851] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-43ecf04c-8752-4285-9a55-360b4bffd7f8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.847200] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1547.847200] env[61978]: value = "task-1396214" [ 1547.847200] env[61978]: _type = "Task" [ 1547.847200] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.854957] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396214, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.356642] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396214, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480485} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.356977] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] bc2d4609-bb75-48e7-859b-7cbb02041f52/bc2d4609-bb75-48e7-859b-7cbb02041f52.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1548.357153] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1548.357416] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bf95232b-86e5-40ef-801e-a42b8fcfa55b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.363622] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1548.363622] env[61978]: value = "task-1396215" [ 1548.363622] env[61978]: _type = "Task" [ 1548.363622] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.370813] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396215, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.702214] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "d542b9ef-a4d8-4dad-8b97-b9e67372e214" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1548.702532] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "d542b9ef-a4d8-4dad-8b97-b9e67372e214" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1548.873138] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396215, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06824} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.873390] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1548.874135] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ad3eb6-92db-4edf-9a4e-56609a838770 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.894647] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Reconfiguring VM instance instance-00000079 to attach disk [datastore2] bc2d4609-bb75-48e7-859b-7cbb02041f52/bc2d4609-bb75-48e7-859b-7cbb02041f52.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1548.894875] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b591a11-5417-4e7c-931e-fe465822b62f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.912739] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1548.912739] env[61978]: value = "task-1396216" [ 1548.912739] env[61978]: _type = "Task" [ 1548.912739] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.919602] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396216, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.204727] env[61978]: DEBUG nova.compute.manager [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1549.422891] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396216, 'name': ReconfigVM_Task, 'duration_secs': 0.2966} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.423232] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Reconfigured VM instance instance-00000079 to attach disk [datastore2] bc2d4609-bb75-48e7-859b-7cbb02041f52/bc2d4609-bb75-48e7-859b-7cbb02041f52.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1549.423774] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d66ef8d-855a-4e65-a98d-690d8cb756ba {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.430151] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1549.430151] env[61978]: value = "task-1396217" [ 1549.430151] env[61978]: _type = "Task" [ 1549.430151] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.437602] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396217, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.727294] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.727570] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.729153] env[61978]: INFO nova.compute.claims [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1549.939796] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396217, 'name': Rename_Task, 'duration_secs': 0.176822} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.940095] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1549.940389] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-757650bb-2b8e-4c6b-a22d-14ca31bd369f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.946572] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1549.946572] env[61978]: value = "task-1396218" [ 1549.946572] env[61978]: _type = "Task" [ 1549.946572] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.953585] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396218, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.456382] env[61978]: DEBUG oslo_vmware.api [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396218, 'name': PowerOnVM_Task, 'duration_secs': 0.468962} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.456702] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1550.456856] env[61978]: INFO nova.compute.manager [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Took 7.18 seconds to spawn the instance on the hypervisor. 
[ 1550.457055] env[61978]: DEBUG nova.compute.manager [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1550.457822] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3793cd3e-4a26-4a69-bbc7-b7dd7afa5177 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.817063] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75614abb-8979-435f-9478-68dafeb9caae {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.827308] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b9c5ed-2046-47f6-8a60-a044452532de {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.865454] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc64c5f-b5a7-4c37-aaa0-fad119b24425 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.872822] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a10735f2-e379-44d8-95a7-ffa3ca747e97 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.885637] env[61978]: DEBUG nova.compute.provider_tree [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1550.938218] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f052b775-c0e9-42bf-a1a6-a53d594cd58c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "6a454083-8d85-4a29-98dc-29eb0a072560" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1550.938490] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f052b775-c0e9-42bf-a1a6-a53d594cd58c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "6a454083-8d85-4a29-98dc-29eb0a072560" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.974808] env[61978]: INFO nova.compute.manager [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Took 11.88 seconds to build instance. 
[ 1551.389529] env[61978]: DEBUG nova.scheduler.client.report [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1551.442146] env[61978]: INFO nova.compute.manager [None req-f052b775-c0e9-42bf-a1a6-a53d594cd58c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Detaching volume 92c75742-2007-44d7-9c7e-705254285c91 [ 1551.447449] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "bc2d4609-bb75-48e7-859b-7cbb02041f52" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1551.473558] env[61978]: INFO nova.virt.block_device [None req-f052b775-c0e9-42bf-a1a6-a53d594cd58c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Attempting to driver detach volume 92c75742-2007-44d7-9c7e-705254285c91 from mountpoint /dev/sdb [ 1551.473827] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f052b775-c0e9-42bf-a1a6-a53d594cd58c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Volume detach. 
Driver type: vmdk {{(pid=61978) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1551.474011] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f052b775-c0e9-42bf-a1a6-a53d594cd58c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296082', 'volume_id': '92c75742-2007-44d7-9c7e-705254285c91', 'name': 'volume-92c75742-2007-44d7-9c7e-705254285c91', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '6a454083-8d85-4a29-98dc-29eb0a072560', 'attached_at': '', 'detached_at': '', 'volume_id': '92c75742-2007-44d7-9c7e-705254285c91', 'serial': '92c75742-2007-44d7-9c7e-705254285c91'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1551.474883] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7be1f46-e3b2-464c-ad68-b8a49ada5287 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.477629] env[61978]: DEBUG oslo_concurrency.lockutils [None req-4b2ac639-8046-4aec-a79e-d8817cd60032 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "bc2d4609-bb75-48e7-859b-7cbb02041f52" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.391s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1551.477890] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "bc2d4609-bb75-48e7-859b-7cbb02041f52" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.031s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1551.478127] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "bc2d4609-bb75-48e7-859b-7cbb02041f52-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1551.478348] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "bc2d4609-bb75-48e7-859b-7cbb02041f52-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1551.478666] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "bc2d4609-bb75-48e7-859b-7cbb02041f52-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1551.481557] env[61978]: INFO 
nova.compute.manager [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Terminating instance [ 1551.483377] env[61978]: DEBUG nova.compute.manager [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1551.483579] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1551.484364] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed60ee8b-3200-47d1-a64a-f0b124854b79 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.503783] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd73b6b-4023-45bc-a066-1fc4dcffa236 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.509922] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1551.510519] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-434d848a-7828-4954-9c9b-d2f7eae02b00 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.514097] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a140794-d357-43af-8651-10329ef20b98 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.517191] env[61978]: DEBUG oslo_vmware.api [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1551.517191] env[61978]: value = "task-1396219" [ 1551.517191] env[61978]: _type = "Task" [ 1551.517191] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.535683] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-184872df-92ff-4903-9ef5-adfe078c6366 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.541126] env[61978]: DEBUG oslo_vmware.api [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396219, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.553496] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f052b775-c0e9-42bf-a1a6-a53d594cd58c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] The volume has not been displaced from its original location: [datastore2] volume-92c75742-2007-44d7-9c7e-705254285c91/volume-92c75742-2007-44d7-9c7e-705254285c91.vmdk. No consolidation needed. {{(pid=61978) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1551.558661] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f052b775-c0e9-42bf-a1a6-a53d594cd58c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Reconfiguring VM instance instance-00000070 to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1551.558909] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ac71638-3b88-4171-b4db-49cbb3c2f2c4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.576230] env[61978]: DEBUG oslo_vmware.api [None req-f052b775-c0e9-42bf-a1a6-a53d594cd58c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1551.576230] env[61978]: value = "task-1396220" [ 1551.576230] env[61978]: _type = "Task" [ 1551.576230] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.583495] env[61978]: DEBUG oslo_vmware.api [None req-f052b775-c0e9-42bf-a1a6-a53d594cd58c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396220, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.895580] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.168s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1551.896126] env[61978]: DEBUG nova.compute.manager [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1552.027039] env[61978]: DEBUG oslo_vmware.api [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396219, 'name': PowerOffVM_Task, 'duration_secs': 0.221882} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.027316] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1552.027492] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1552.027749] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c8ac6fd-2548-4980-81ff-7317d043bd1e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.086795] env[61978]: DEBUG oslo_vmware.api [None req-f052b775-c0e9-42bf-a1a6-a53d594cd58c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396220, 'name': ReconfigVM_Task, 'duration_secs': 0.233368} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.087094] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f052b775-c0e9-42bf-a1a6-a53d594cd58c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Reconfigured VM instance instance-00000070 to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1552.092694] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d18166f4-538a-4dd8-b2cc-6b15013f7a49 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.102829] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1552.102829] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1552.102908] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Deleting the datastore file [datastore2] bc2d4609-bb75-48e7-859b-7cbb02041f52 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1552.103182] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75a42c89-e343-4408-998d-b317892fb094 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.110027] env[61978]: DEBUG oslo_vmware.api [None 
req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1552.110027] env[61978]: value = "task-1396223" [ 1552.110027] env[61978]: _type = "Task" [ 1552.110027] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.112146] env[61978]: DEBUG oslo_vmware.api [None req-f052b775-c0e9-42bf-a1a6-a53d594cd58c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1552.112146] env[61978]: value = "task-1396222" [ 1552.112146] env[61978]: _type = "Task" [ 1552.112146] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.124102] env[61978]: DEBUG oslo_vmware.api [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396223, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.129410] env[61978]: DEBUG oslo_vmware.api [None req-f052b775-c0e9-42bf-a1a6-a53d594cd58c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396222, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.401655] env[61978]: DEBUG nova.compute.utils [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1552.403079] env[61978]: DEBUG nova.compute.manager [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Allocating IP information in the background. 
{{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1552.403251] env[61978]: DEBUG nova.network.neutron [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1552.457740] env[61978]: DEBUG nova.policy [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd72a836e3aef4b59b1092b91f33fd929', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2b289cdad1fe4ad38c5d987680be2367', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1552.622982] env[61978]: DEBUG oslo_vmware.api [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396223, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146885} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.625768] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1552.625965] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1552.626168] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1552.626348] env[61978]: INFO nova.compute.manager [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1552.626609] env[61978]: DEBUG oslo.service.loopingcall [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1552.626794] env[61978]: DEBUG oslo_vmware.api [None req-f052b775-c0e9-42bf-a1a6-a53d594cd58c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396222, 'name': ReconfigVM_Task, 'duration_secs': 0.158443} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.626988] env[61978]: DEBUG nova.compute.manager [-] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1552.627095] env[61978]: DEBUG nova.network.neutron [-] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1552.628642] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-f052b775-c0e9-42bf-a1a6-a53d594cd58c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296082', 'volume_id': '92c75742-2007-44d7-9c7e-705254285c91', 'name': 'volume-92c75742-2007-44d7-9c7e-705254285c91', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '6a454083-8d85-4a29-98dc-29eb0a072560', 'attached_at': '', 'detached_at': '', 'volume_id': '92c75742-2007-44d7-9c7e-705254285c91', 'serial': '92c75742-2007-44d7-9c7e-705254285c91'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1552.786155] env[61978]: DEBUG nova.network.neutron [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Successfully created port: bcdfd00c-adfd-4464-8273-d5ef57460a54 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1552.906153] env[61978]: DEBUG nova.compute.manager [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1553.021856] env[61978]: DEBUG nova.compute.manager [req-04037420-fbf4-4b1a-b925-7c2e28d63fc9 req-3ee5e9c6-5024-4010-bc24-0ed222aa5056 service nova] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Received event network-vif-deleted-1e5b6479-cb7d-422f-ab6e-a24f660960ce {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1553.022176] env[61978]: INFO nova.compute.manager [req-04037420-fbf4-4b1a-b925-7c2e28d63fc9 req-3ee5e9c6-5024-4010-bc24-0ed222aa5056 service nova] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Neutron deleted interface 1e5b6479-cb7d-422f-ab6e-a24f660960ce; detaching it from the instance and deleting it from the info cache [ 1553.022380] env[61978]: DEBUG nova.network.neutron [req-04037420-fbf4-4b1a-b925-7c2e28d63fc9 req-3ee5e9c6-5024-4010-bc24-0ed222aa5056 service nova] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1553.174743] env[61978]: DEBUG nova.objects.instance [None req-f052b775-c0e9-42bf-a1a6-a53d594cd58c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lazy-loading 'flavor' on Instance uuid 6a454083-8d85-4a29-98dc-29eb0a072560 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1553.497836] env[61978]: DEBUG nova.network.neutron [-] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1553.524972] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d2ed9595-eac7-44ca-b97a-3e58aacd88fe {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.535324] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c785a13-e905-4a68-a590-629f20c399ad {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.562689] env[61978]: DEBUG nova.compute.manager [req-04037420-fbf4-4b1a-b925-7c2e28d63fc9 req-3ee5e9c6-5024-4010-bc24-0ed222aa5056 service nova] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Detach interface failed, port_id=1e5b6479-cb7d-422f-ab6e-a24f660960ce, reason: Instance bc2d4609-bb75-48e7-859b-7cbb02041f52 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1553.916279] env[61978]: DEBUG nova.compute.manager [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1553.944160] env[61978]: DEBUG nova.virt.hardware [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1553.944440] env[61978]: DEBUG nova.virt.hardware [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1553.944607] env[61978]: DEBUG nova.virt.hardware [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1553.944830] env[61978]: DEBUG nova.virt.hardware [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1553.944992] env[61978]: DEBUG nova.virt.hardware [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1553.945199] env[61978]: DEBUG nova.virt.hardware [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1553.945416] env[61978]: DEBUG nova.virt.hardware [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1553.945584] env[61978]: DEBUG nova.virt.hardware [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1553.945758] 
env[61978]: DEBUG nova.virt.hardware [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1553.945931] env[61978]: DEBUG nova.virt.hardware [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1553.946122] env[61978]: DEBUG nova.virt.hardware [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1553.947276] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7b2bf6-b58e-412b-bbe3-343b9d1ee937 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.955254] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a7b6817-7c71-4847-9711-431b9b529299 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.001074] env[61978]: INFO nova.compute.manager [-] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Took 1.37 seconds to deallocate network for instance. [ 1554.182694] env[61978]: DEBUG oslo_concurrency.lockutils [None req-f052b775-c0e9-42bf-a1a6-a53d594cd58c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "6a454083-8d85-4a29-98dc-29eb0a072560" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.243s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1554.338385] env[61978]: DEBUG nova.network.neutron [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Successfully updated port: bcdfd00c-adfd-4464-8273-d5ef57460a54 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1554.507781] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.508083] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.508320] env[61978]: DEBUG nova.objects.instance [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 
tempest-ServersTestJSON-1559277588-project-member] Lazy-loading 'resources' on Instance uuid bc2d4609-bb75-48e7-859b-7cbb02041f52 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1554.840573] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "refresh_cache-d542b9ef-a4d8-4dad-8b97-b9e67372e214" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.840730] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquired lock "refresh_cache-d542b9ef-a4d8-4dad-8b97-b9e67372e214" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.840865] env[61978]: DEBUG nova.network.neutron [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1555.046707] env[61978]: DEBUG nova.compute.manager [req-267b1cb6-f9db-43e7-a62a-43a6d2247693 req-ba6a2653-2ac6-4f5f-93e1-7e1e5908f4ef service nova] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Received event network-vif-plugged-bcdfd00c-adfd-4464-8273-d5ef57460a54 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1555.046918] env[61978]: DEBUG oslo_concurrency.lockutils [req-267b1cb6-f9db-43e7-a62a-43a6d2247693 req-ba6a2653-2ac6-4f5f-93e1-7e1e5908f4ef service nova] Acquiring lock "d542b9ef-a4d8-4dad-8b97-b9e67372e214-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.047262] env[61978]: DEBUG oslo_concurrency.lockutils [req-267b1cb6-f9db-43e7-a62a-43a6d2247693 req-ba6a2653-2ac6-4f5f-93e1-7e1e5908f4ef service nova] Lock "d542b9ef-a4d8-4dad-8b97-b9e67372e214-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.047319] env[61978]: DEBUG oslo_concurrency.lockutils [req-267b1cb6-f9db-43e7-a62a-43a6d2247693 req-ba6a2653-2ac6-4f5f-93e1-7e1e5908f4ef service nova] Lock "d542b9ef-a4d8-4dad-8b97-b9e67372e214-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.047474] env[61978]: DEBUG nova.compute.manager [req-267b1cb6-f9db-43e7-a62a-43a6d2247693 req-ba6a2653-2ac6-4f5f-93e1-7e1e5908f4ef service nova] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] No waiting events found dispatching network-vif-plugged-bcdfd00c-adfd-4464-8273-d5ef57460a54 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1555.047643] env[61978]: WARNING nova.compute.manager [req-267b1cb6-f9db-43e7-a62a-43a6d2247693 req-ba6a2653-2ac6-4f5f-93e1-7e1e5908f4ef service nova] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Received unexpected event 
network-vif-plugged-bcdfd00c-adfd-4464-8273-d5ef57460a54 for instance with vm_state building and task_state spawning. [ 1555.047808] env[61978]: DEBUG nova.compute.manager [req-267b1cb6-f9db-43e7-a62a-43a6d2247693 req-ba6a2653-2ac6-4f5f-93e1-7e1e5908f4ef service nova] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Received event network-changed-bcdfd00c-adfd-4464-8273-d5ef57460a54 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1555.047966] env[61978]: DEBUG nova.compute.manager [req-267b1cb6-f9db-43e7-a62a-43a6d2247693 req-ba6a2653-2ac6-4f5f-93e1-7e1e5908f4ef service nova] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Refreshing instance network info cache due to event network-changed-bcdfd00c-adfd-4464-8273-d5ef57460a54. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1555.048151] env[61978]: DEBUG oslo_concurrency.lockutils [req-267b1cb6-f9db-43e7-a62a-43a6d2247693 req-ba6a2653-2ac6-4f5f-93e1-7e1e5908f4ef service nova] Acquiring lock "refresh_cache-d542b9ef-a4d8-4dad-8b97-b9e67372e214" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1555.083407] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-327983d4-3c70-477a-b257-4881206e1515 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.091604] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e11f90-771b-43d2-8b6f-7e7b1f4a52d3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.120773] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50305f1-4466-46b8-998d-0b75e5fd012b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.127767] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05019a8e-494c-4870-ae38-ab5c03811ba2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.141610] env[61978]: DEBUG nova.compute.provider_tree [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1555.174505] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "6a454083-8d85-4a29-98dc-29eb0a072560" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.174796] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "6a454083-8d85-4a29-98dc-29eb0a072560" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.175055] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "6a454083-8d85-4a29-98dc-29eb0a072560-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.175256] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "6a454083-8d85-4a29-98dc-29eb0a072560-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.175429] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "6a454083-8d85-4a29-98dc-29eb0a072560-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.177487] env[61978]: INFO nova.compute.manager [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Terminating instance [ 1555.179317] env[61978]: DEBUG nova.compute.manager [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1555.179510] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1555.180327] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f468ba25-571f-420a-93e6-41a117f4100e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.188161] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1555.188396] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a56e20ad-77cb-44c6-a919-1913f3c1fdf5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.194277] env[61978]: DEBUG oslo_vmware.api [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1555.194277] env[61978]: value = "task-1396224" [ 1555.194277] env[61978]: _type = "Task" [ 1555.194277] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.201786] env[61978]: DEBUG oslo_vmware.api [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396224, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.375315] env[61978]: DEBUG nova.network.neutron [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1555.511340] env[61978]: DEBUG nova.network.neutron [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Updating instance_info_cache with network_info: [{"id": "bcdfd00c-adfd-4464-8273-d5ef57460a54", "address": "fa:16:3e:dc:66:09", "network": {"id": "b65f6c14-2588-4d96-a954-d45b7ede9d35", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-701006356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b289cdad1fe4ad38c5d987680be2367", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcdfd00c-ad", "ovs_interfaceid": "bcdfd00c-adfd-4464-8273-d5ef57460a54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1555.644327] env[61978]: DEBUG nova.scheduler.client.report [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1555.704213] env[61978]: DEBUG oslo_vmware.api [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396224, 'name': PowerOffVM_Task, 'duration_secs': 0.174218} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.704501] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1555.704675] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1555.704935] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a8ebd8a7-f799-408d-b6a8-c78acc2e014d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.769558] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1555.769797] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Deleting contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1555.769985] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Deleting the datastore file [datastore1] 6a454083-8d85-4a29-98dc-29eb0a072560 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1555.770281] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6898ea68-b0ea-48d2-94fa-681f1c99dd39 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.776082] env[61978]: DEBUG oslo_vmware.api [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1555.776082] env[61978]: value = "task-1396226" [ 1555.776082] env[61978]: _type = "Task" [ 1555.776082] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.783737] env[61978]: DEBUG oslo_vmware.api [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396226, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.014231] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Releasing lock "refresh_cache-d542b9ef-a4d8-4dad-8b97-b9e67372e214" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1556.014586] env[61978]: DEBUG nova.compute.manager [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Instance network_info: |[{"id": "bcdfd00c-adfd-4464-8273-d5ef57460a54", "address": "fa:16:3e:dc:66:09", "network": {"id": "b65f6c14-2588-4d96-a954-d45b7ede9d35", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-701006356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b289cdad1fe4ad38c5d987680be2367", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcdfd00c-ad", "ovs_interfaceid": "bcdfd00c-adfd-4464-8273-d5ef57460a54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1556.014922] env[61978]: DEBUG oslo_concurrency.lockutils [req-267b1cb6-f9db-43e7-a62a-43a6d2247693 req-ba6a2653-2ac6-4f5f-93e1-7e1e5908f4ef service nova] Acquired lock "refresh_cache-d542b9ef-a4d8-4dad-8b97-b9e67372e214" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1556.015134] env[61978]: DEBUG nova.network.neutron [req-267b1cb6-f9db-43e7-a62a-43a6d2247693 req-ba6a2653-2ac6-4f5f-93e1-7e1e5908f4ef service nova] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Refreshing network info cache for port bcdfd00c-adfd-4464-8273-d5ef57460a54 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1556.016413] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:66:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '67921bdb-a7a0-46b5-ba05-ca997496e222', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bcdfd00c-adfd-4464-8273-d5ef57460a54', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1556.024720] env[61978]: DEBUG oslo.service.loopingcall [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 
tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1556.027720] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1556.028529] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-740c79a5-8fda-4997-ae51-faffd5cbae9d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.048573] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1556.048573] env[61978]: value = "task-1396227" [ 1556.048573] env[61978]: _type = "Task" [ 1556.048573] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.056461] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396227, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.148945] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.641s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1556.167666] env[61978]: INFO nova.scheduler.client.report [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Deleted allocations for instance bc2d4609-bb75-48e7-859b-7cbb02041f52 [ 1556.247759] env[61978]: DEBUG nova.network.neutron [req-267b1cb6-f9db-43e7-a62a-43a6d2247693 req-ba6a2653-2ac6-4f5f-93e1-7e1e5908f4ef service nova] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Updated VIF entry in instance network info cache for port bcdfd00c-adfd-4464-8273-d5ef57460a54. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1556.248151] env[61978]: DEBUG nova.network.neutron [req-267b1cb6-f9db-43e7-a62a-43a6d2247693 req-ba6a2653-2ac6-4f5f-93e1-7e1e5908f4ef service nova] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Updating instance_info_cache with network_info: [{"id": "bcdfd00c-adfd-4464-8273-d5ef57460a54", "address": "fa:16:3e:dc:66:09", "network": {"id": "b65f6c14-2588-4d96-a954-d45b7ede9d35", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-701006356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b289cdad1fe4ad38c5d987680be2367", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcdfd00c-ad", "ovs_interfaceid": "bcdfd00c-adfd-4464-8273-d5ef57460a54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1556.285697] env[61978]: DEBUG oslo_vmware.api [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396226, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141158} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.285896] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1556.286099] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Deleted contents of the VM from datastore datastore1 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1556.286385] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1556.286504] env[61978]: INFO nova.compute.manager [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Took 1.11 seconds to destroy the instance on the hypervisor. 
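The entries above trace Nova's VMware destroy path for instance 6a454083-8d85-4a29-98dc-29eb0a072560: PowerOffVM_Task, then UnregisterVM, then DeleteDatastoreFile_Task, with each asynchronous vCenter task polled through oslo.vmware. The following is a minimal, hedged sketch of that pattern only; `session`, `vm_ref`, `dc_ref` and `ds_path` are hypothetical placeholders rather than values from this run, and the flow is a simplification of nova/virt/vmwareapi/vmops.py, not its actual code.

    # Hedged sketch: the power-off / unregister / file-delete sequence seen above,
    # expressed with oslo.vmware primitives. `session` is assumed to be an existing
    # oslo_vmware.api.VMwareAPISession; `vm_ref`, `dc_ref` and `ds_path` are
    # hypothetical placeholders (a VM managed-object ref, a Datacenter ref and a
    # "[datastore1] <uuid>" path), not values taken from the log.
    from oslo_vmware import exceptions as vexc


    def destroy_backing(session, vm_ref, dc_ref, ds_path):
        try:
            # PowerOffVM_Task returns a task ref; wait_for_task() polls it, which
            # is what produces the "Task: {'id': task-..., 'name': PowerOffVM_Task}"
            # progress lines above.
            task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
            session.wait_for_task(task)
        except vexc.VimFaultException:
            # The VM may already be powered off; Nova tolerates that case as well.
            pass

        # UnregisterVM is a plain synchronous call, hence no task id in the log.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # Removing the instance directory from the datastore is asynchronous again
        # (the DeleteDatastoreFile_Task entries above).
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path, datacenter=dc_ref)
        session.wait_for_task(task)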
[ 1556.286723] env[61978]: DEBUG oslo.service.loopingcall [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1556.286921] env[61978]: DEBUG nova.compute.manager [-] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1556.287024] env[61978]: DEBUG nova.network.neutron [-] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1556.560542] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396227, 'name': CreateVM_Task, 'duration_secs': 0.358068} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.560719] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1556.561410] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1556.561589] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1556.561964] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1556.562557] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f980c1d1-bef5-4d75-93ee-cc4916e483a0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.566951] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1556.566951] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b23c17-ef2c-16d8-2f00-890f5db01813" [ 1556.566951] env[61978]: _type = "Task" [ 1556.566951] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.574380] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b23c17-ef2c-16d8-2f00-890f5db01813, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.676634] env[61978]: DEBUG oslo_concurrency.lockutils [None req-e66cb2ee-8a3a-4751-9e04-00f9bef93b96 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "bc2d4609-bb75-48e7-859b-7cbb02041f52" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.199s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1556.750742] env[61978]: DEBUG oslo_concurrency.lockutils [req-267b1cb6-f9db-43e7-a62a-43a6d2247693 req-ba6a2653-2ac6-4f5f-93e1-7e1e5908f4ef service nova] Releasing lock "refresh_cache-d542b9ef-a4d8-4dad-8b97-b9e67372e214" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1556.933645] env[61978]: DEBUG oslo_concurrency.lockutils [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "d587cf18-1558-4e01-be53-3b7bf8287fdd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.934410] env[61978]: DEBUG oslo_concurrency.lockutils [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "d587cf18-1558-4e01-be53-3b7bf8287fdd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1556.934410] env[61978]: DEBUG oslo_concurrency.lockutils [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "d587cf18-1558-4e01-be53-3b7bf8287fdd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.934547] env[61978]: DEBUG oslo_concurrency.lockutils [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "d587cf18-1558-4e01-be53-3b7bf8287fdd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1556.934681] env[61978]: DEBUG oslo_concurrency.lockutils [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "d587cf18-1558-4e01-be53-3b7bf8287fdd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1556.936833] env[61978]: INFO nova.compute.manager [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Terminating instance [ 1556.938826] env[61978]: DEBUG nova.compute.manager [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1556.939045] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1556.939881] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037ce715-ba15-4828-8063-ececa4f201a0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.947938] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1556.948200] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-06bea4f2-a6f8-4ffd-9309-65acf685fc1a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.954119] env[61978]: DEBUG oslo_vmware.api [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1556.954119] env[61978]: value = "task-1396228" [ 1556.954119] env[61978]: _type = "Task" [ 1556.954119] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.961820] env[61978]: DEBUG oslo_vmware.api [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396228, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.074272] env[61978]: DEBUG nova.compute.manager [req-21042c77-7431-435d-b78f-3a75e91054c3 req-b5a6d2c1-9019-4f7a-9969-c1c8a38aa8ad service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Received event network-vif-deleted-6c4911ae-3f5e-46c7-9538-7e1a9811252e {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1557.074509] env[61978]: INFO nova.compute.manager [req-21042c77-7431-435d-b78f-3a75e91054c3 req-b5a6d2c1-9019-4f7a-9969-c1c8a38aa8ad service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Neutron deleted interface 6c4911ae-3f5e-46c7-9538-7e1a9811252e; detaching it from the instance and deleting it from the info cache [ 1557.074667] env[61978]: DEBUG nova.network.neutron [req-21042c77-7431-435d-b78f-3a75e91054c3 req-b5a6d2c1-9019-4f7a-9969-c1c8a38aa8ad service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1557.080214] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52b23c17-ef2c-16d8-2f00-890f5db01813, 'name': SearchDatastore_Task, 'duration_secs': 0.009606} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.080522] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1557.080747] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1557.081000] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.081190] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.081379] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Creating directory 
with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1557.081883] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c91ed3d-8c0d-42b7-9d28-7fdf3403d59f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.090988] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1557.091263] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1557.092271] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dff4b992-f076-4f80-b418-8f1bc317c711 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.099072] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1557.099072] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5219d1c5-d2ac-2257-2e74-3af787cbe298" [ 1557.099072] env[61978]: _type = "Task" [ 1557.099072] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.109792] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5219d1c5-d2ac-2257-2e74-3af787cbe298, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.236053] env[61978]: DEBUG nova.network.neutron [-] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1557.463981] env[61978]: DEBUG oslo_vmware.api [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396228, 'name': PowerOffVM_Task, 'duration_secs': 0.169717} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.464279] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1557.464458] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1557.464714] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6609de8f-a4fb-4cd3-8896-1de728b12002 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.525892] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1557.526168] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1557.526387] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Deleting the datastore file [datastore2] d587cf18-1558-4e01-be53-3b7bf8287fdd {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1557.526702] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-08cfc1ac-5f8f-441b-95b2-605dc29250b0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.533071] env[61978]: DEBUG oslo_vmware.api [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1557.533071] env[61978]: value = "task-1396230" [ 1557.533071] env[61978]: _type = "Task" [ 1557.533071] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.540520] env[61978]: DEBUG oslo_vmware.api [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396230, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.577745] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0f61a62d-26ec-4d01-a57c-7034c8a37c1a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.586741] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3cb3147-5cfe-43d5-93ca-b9ee01c305ff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.610381] env[61978]: DEBUG nova.compute.manager [req-21042c77-7431-435d-b78f-3a75e91054c3 req-b5a6d2c1-9019-4f7a-9969-c1c8a38aa8ad service nova] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Detach interface failed, port_id=6c4911ae-3f5e-46c7-9538-7e1a9811252e, reason: Instance 6a454083-8d85-4a29-98dc-29eb0a072560 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1557.613810] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5219d1c5-d2ac-2257-2e74-3af787cbe298, 'name': SearchDatastore_Task, 'duration_secs': 0.009482} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.614589] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32bf90e1-7865-4407-987a-221dc38bac0c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.619712] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1557.619712] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]521275ba-9167-88ef-db9d-25918731aa87" [ 1557.619712] env[61978]: _type = "Task" [ 1557.619712] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.626624] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]521275ba-9167-88ef-db9d-25918731aa87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.738518] env[61978]: INFO nova.compute.manager [-] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Took 1.45 seconds to deallocate network for instance. [ 1558.042972] env[61978]: DEBUG oslo_vmware.api [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396230, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134463} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.043265] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1558.043457] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1558.043644] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1558.043828] env[61978]: INFO nova.compute.manager [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1558.044088] env[61978]: DEBUG oslo.service.loopingcall [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1558.044293] env[61978]: DEBUG nova.compute.manager [-] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1558.044388] env[61978]: DEBUG nova.network.neutron [-] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1558.129488] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]521275ba-9167-88ef-db9d-25918731aa87, 'name': SearchDatastore_Task, 'duration_secs': 0.008393} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.129831] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1558.130048] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] d542b9ef-a4d8-4dad-8b97-b9e67372e214/d542b9ef-a4d8-4dad-8b97-b9e67372e214.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1558.130318] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-254cc55e-d665-4464-b964-3b2705d725b5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.137858] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1558.137858] env[61978]: value = "task-1396231" [ 1558.137858] env[61978]: _type = "Task" [ 1558.137858] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.145690] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396231, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.245434] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1558.245771] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.245985] env[61978]: DEBUG nova.objects.instance [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lazy-loading 'resources' on Instance uuid 6a454083-8d85-4a29-98dc-29eb0a072560 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1558.647525] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396231, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.467297} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.647788] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] d542b9ef-a4d8-4dad-8b97-b9e67372e214/d542b9ef-a4d8-4dad-8b97-b9e67372e214.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1558.648017] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1558.648286] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-265ae363-9b92-479e-8f99-79538776d6b0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.653940] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1558.653940] env[61978]: value = "task-1396232" [ 1558.653940] env[61978]: _type = "Task" [ 1558.653940] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.661622] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396232, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.798696] env[61978]: DEBUG nova.network.neutron [-] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1558.803844] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39cccc45-13f6-4f28-bcf3-92a8e397c091 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.811468] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe4a11c-ee73-41df-b3d7-28be37fd8469 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.841195] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb1bab8-d493-47c6-a27b-8badaa539af6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.847658] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c92a0120-6adc-4dd3-bca0-62b043788c23 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.860771] env[61978]: DEBUG nova.compute.provider_tree [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1559.099267] env[61978]: DEBUG nova.compute.manager [req-11d2370c-a47a-4edf-8fea-ff69383b422b req-7ce8ac14-5b75-43d3-a60b-0ad2a9b69ea6 service nova] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Received event network-vif-deleted-ff1be352-d287-4618-a8a0-bd1affb328ec {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1559.164617] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396232, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067586} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.164958] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1559.165746] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-399eaedc-e9d9-4266-a4b3-1253bf40528b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.190545] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] d542b9ef-a4d8-4dad-8b97-b9e67372e214/d542b9ef-a4d8-4dad-8b97-b9e67372e214.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1559.190864] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4116f072-c76b-4ed0-8e3f-44afa3067192 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.210505] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1559.210505] env[61978]: value = "task-1396233" [ 1559.210505] env[61978]: _type = "Task" [ 1559.210505] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.218188] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396233, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.301464] env[61978]: INFO nova.compute.manager [-] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Took 1.26 seconds to deallocate network for instance. 
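In parallel with the teardown above, the spawn of d542b9ef-a4d8-4dad-8b97-b9e67372e214 copies the cached image VMDK into the instance directory (CopyVirtualDisk_Task), extends the root disk (ExtendVirtualDisk_Task) and attaches it via ReconfigVM_Task. Below is a hedged sketch of the first two steps using the VirtualDiskManager calls visible in the log; all names (`session`, `dc_ref`, the paths and the size) are illustrative stand-ins, not the values from this run.

    # Hedged sketch of the CopyVirtualDisk_Task / ExtendVirtualDisk_Task pair seen
    # above. Assumes an oslo_vmware.api.VMwareAPISession (`session`) and a
    # Datacenter managed-object ref (`dc_ref`); the datastore paths and size are
    # illustrative placeholders.
    def clone_and_extend_root_disk(session, dc_ref,
                                   cached_vmdk, instance_vmdk, new_size_gb):
        disk_mgr = session.vim.service_content.virtualDiskManager

        # Copy the cached image VMDK into the instance directory (e.g. from
        # "[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk").
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                  sourceName=cached_vmdk, sourceDatacenter=dc_ref,
                                  destName=instance_vmdk, destDatacenter=dc_ref)
        session.wait_for_task(task)

        # Grow the copied disk to the flavor's root size; the API takes KB, which
        # is why the log reads "Extending root virtual disk to 1048576" (1 GB).
        task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                                  name=instance_vmdk, datacenter=dc_ref,
                                  newCapacityKb=new_size_gb * 1024 * 1024,
                                  eagerZero=False)
        session.wait_for_task(task)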
[ 1559.363602] env[61978]: DEBUG nova.scheduler.client.report [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1559.720507] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396233, 'name': ReconfigVM_Task, 'duration_secs': 0.29165} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.720801] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Reconfigured VM instance instance-0000007a to attach disk [datastore2] d542b9ef-a4d8-4dad-8b97-b9e67372e214/d542b9ef-a4d8-4dad-8b97-b9e67372e214.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1559.721449] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1b4036b2-d183-4fe8-b35c-aff39d63a6c4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.727429] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1559.727429] env[61978]: value = "task-1396234" [ 1559.727429] env[61978]: _type = "Task" [ 1559.727429] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.734745] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396234, 'name': Rename_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.808133] env[61978]: DEBUG oslo_concurrency.lockutils [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.868448] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.623s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1559.870824] env[61978]: DEBUG oslo_concurrency.lockutils [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.063s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1559.871097] env[61978]: DEBUG nova.objects.instance [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lazy-loading 'resources' on Instance uuid d587cf18-1558-4e01-be53-3b7bf8287fdd {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1559.886502] env[61978]: INFO nova.scheduler.client.report [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Deleted allocations for instance 6a454083-8d85-4a29-98dc-29eb0a072560 [ 1560.064441] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1560.064666] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1560.236778] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396234, 'name': Rename_Task, 'duration_secs': 0.131818} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.237082] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1560.237330] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-66aea3cb-ddd1-455f-b104-e8e4009ba3ee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.243435] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1560.243435] env[61978]: value = "task-1396235" [ 1560.243435] env[61978]: _type = "Task" [ 1560.243435] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.250513] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396235, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.396211] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bcd6f354-a35e-4394-bc2f-e4e5d7d38a86 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "6a454083-8d85-4a29-98dc-29eb0a072560" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.221s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.431526] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe85d7c-b266-4adb-a6c6-2b998eab8d17 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.439735] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83d445e-8d48-4591-8ca9-e033e9dc4caf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.468689] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714e0891-6da8-4879-a779-b23db476c196 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.475873] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f193e2fb-889d-43b8-b968-7ef2d84480cb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.488614] env[61978]: DEBUG nova.compute.provider_tree [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1560.571417] env[61978]: DEBUG 
oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1560.571417] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1560.757023] env[61978]: DEBUG oslo_vmware.api [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396235, 'name': PowerOnVM_Task, 'duration_secs': 0.430226} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.757023] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1560.757023] env[61978]: INFO nova.compute.manager [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Took 6.84 seconds to spawn the instance on the hypervisor. [ 1560.757023] env[61978]: DEBUG nova.compute.manager [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1560.757023] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286b8e98-aed0-475e-91d8-c092d4f0ceac {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.992194] env[61978]: DEBUG nova.scheduler.client.report [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1561.275620] env[61978]: INFO nova.compute.manager [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Took 11.57 seconds to build instance. 
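The Rename_Task and PowerOnVM_Task records above show the oslo_vmware.api wait_for_task pattern: a vCenter task is submitted, then polled ("progress is 0%") until it reports completion. The sketch below is a minimal, self-contained illustration of that poll-until-terminal loop; TaskStatus, TaskInfo and fetch_task_info are hypothetical stand-ins and not the actual oslo.vmware API.

    import time
    from dataclasses import dataclass
    from enum import Enum

    class TaskStatus(Enum):          # hypothetical; vSphere tasks use queued/running/success/error states
        RUNNING = "running"
        SUCCESS = "success"
        ERROR = "error"

    @dataclass
    class TaskInfo:                  # hypothetical container for one poll result
        status: TaskStatus
        progress: int
        error: str | None = None

    def wait_for_task(fetch_task_info, task_id: str, interval: float = 0.5) -> TaskInfo:
        """Poll a task until it reaches a terminal state, logging progress.

        fetch_task_info(task_id) -> TaskInfo is an assumed callable standing in
        for the vCenter TaskInfo lookup performed by the service layer.
        """
        while True:
            info = fetch_task_info(task_id)
            if info.status is TaskStatus.SUCCESS:
                return info
            if info.status is TaskStatus.ERROR:
                raise RuntimeError(f"Task {task_id} failed: {info.error}")
            # mirrors the "_poll_task ... progress is N%" DEBUG lines above
            print(f"Task {task_id} progress is {info.progress}%")
            time.sleep(interval)

Called as wait_for_task(lookup, "task-1396235") with a stand-in lookup, this returns once the task reports success, roughly matching the "progress is 0%" followed by "completed successfully" sequence logged for PowerOnVM_Task.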
[ 1561.497099] env[61978]: DEBUG oslo_concurrency.lockutils [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.626s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1561.521349] env[61978]: INFO nova.scheduler.client.report [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Deleted allocations for instance d587cf18-1558-4e01-be53-3b7bf8287fdd [ 1561.613881] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "refresh_cache-733c2f53-04d3-4a8b-a7c1-5194d7961a31" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.613881] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquired lock "refresh_cache-733c2f53-04d3-4a8b-a7c1-5194d7961a31" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.613881] env[61978]: DEBUG nova.network.neutron [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Forcefully refreshing network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1561.777847] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6bd84581-76d0-4961-9e3d-16de374e2468 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "d542b9ef-a4d8-4dad-8b97-b9e67372e214" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.075s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1561.784288] env[61978]: DEBUG nova.compute.manager [req-d73fcc2f-c716-4fcb-9f75-73ba9ae27e01 req-29e2efce-faf9-4c67-9133-3295ff3b7236 service nova] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Received event network-changed-bcdfd00c-adfd-4464-8273-d5ef57460a54 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1561.784288] env[61978]: DEBUG nova.compute.manager [req-d73fcc2f-c716-4fcb-9f75-73ba9ae27e01 req-29e2efce-faf9-4c67-9133-3295ff3b7236 service nova] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Refreshing instance network info cache due to event network-changed-bcdfd00c-adfd-4464-8273-d5ef57460a54. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1561.784288] env[61978]: DEBUG oslo_concurrency.lockutils [req-d73fcc2f-c716-4fcb-9f75-73ba9ae27e01 req-29e2efce-faf9-4c67-9133-3295ff3b7236 service nova] Acquiring lock "refresh_cache-d542b9ef-a4d8-4dad-8b97-b9e67372e214" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.784569] env[61978]: DEBUG oslo_concurrency.lockutils [req-d73fcc2f-c716-4fcb-9f75-73ba9ae27e01 req-29e2efce-faf9-4c67-9133-3295ff3b7236 service nova] Acquired lock "refresh_cache-d542b9ef-a4d8-4dad-8b97-b9e67372e214" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.784669] env[61978]: DEBUG nova.network.neutron [req-d73fcc2f-c716-4fcb-9f75-73ba9ae27e01 req-29e2efce-faf9-4c67-9133-3295ff3b7236 service nova] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Refreshing network info cache for port bcdfd00c-adfd-4464-8273-d5ef57460a54 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1562.023794] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1562.024565] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.029710] env[61978]: DEBUG oslo_concurrency.lockutils [None req-08b87620-0531-4014-a4cf-6c51add4c144 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "d587cf18-1558-4e01-be53-3b7bf8287fdd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.096s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.526550] env[61978]: DEBUG nova.compute.manager [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1562.530142] env[61978]: DEBUG nova.network.neutron [req-d73fcc2f-c716-4fcb-9f75-73ba9ae27e01 req-29e2efce-faf9-4c67-9133-3295ff3b7236 service nova] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Updated VIF entry in instance network info cache for port bcdfd00c-adfd-4464-8273-d5ef57460a54. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1562.530549] env[61978]: DEBUG nova.network.neutron [req-d73fcc2f-c716-4fcb-9f75-73ba9ae27e01 req-29e2efce-faf9-4c67-9133-3295ff3b7236 service nova] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Updating instance_info_cache with network_info: [{"id": "bcdfd00c-adfd-4464-8273-d5ef57460a54", "address": "fa:16:3e:dc:66:09", "network": {"id": "b65f6c14-2588-4d96-a954-d45b7ede9d35", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-701006356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b289cdad1fe4ad38c5d987680be2367", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcdfd00c-ad", "ovs_interfaceid": "bcdfd00c-adfd-4464-8273-d5ef57460a54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1562.807722] env[61978]: DEBUG nova.network.neutron [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Updating instance_info_cache with network_info: [{"id": "850a6613-240f-4bb6-a3bd-cd95cd2ebe18", "address": "fa:16:3e:c6:16:a6", "network": {"id": "a08d803f-f140-462d-a335-21fe5ce33ff5", "bridge": "br-int", "label": "tempest-ServersTestJSON-1636313508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaa4a0cb1a4c45949b43032fd9395200", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap850a6613-24", "ovs_interfaceid": "850a6613-240f-4bb6-a3bd-cd95cd2ebe18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1562.810046] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1562.810046] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1563.035800] env[61978]: DEBUG oslo_concurrency.lockutils [req-d73fcc2f-c716-4fcb-9f75-73ba9ae27e01 req-29e2efce-faf9-4c67-9133-3295ff3b7236 service nova] Releasing lock "refresh_cache-d542b9ef-a4d8-4dad-8b97-b9e67372e214" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1563.049492] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1563.049776] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1563.051312] env[61978]: INFO nova.compute.claims [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1563.310050] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Releasing lock "refresh_cache-733c2f53-04d3-4a8b-a7c1-5194d7961a31" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1563.310334] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Updated the network info_cache for instance {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1563.310611] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1563.310858] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1563.311099] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 
1563.311496] env[61978]: DEBUG nova.compute.manager [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Starting instance... {{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1563.314437] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1563.314669] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1563.314872] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1563.315042] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1563.315205] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1563.819190] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1563.836277] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1564.113752] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f51da668-0c6e-481f-b7c4-1003b1939d6c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.120878] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f010f6a-7551-4914-a522-3fbcfb29f599 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.149842] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d14297-6ebb-439c-9d1e-1660c7c4fe01 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.157473] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5014673d-8e19-4894-94cc-d8319060943f {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.173031] env[61978]: DEBUG nova.compute.provider_tree [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1564.675672] env[61978]: DEBUG nova.scheduler.client.report [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1565.181239] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.131s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.181766] env[61978]: DEBUG nova.compute.manager [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1565.184778] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.366s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.184952] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.185121] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1565.185413] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.349s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.186787] env[61978]: INFO nova.compute.claims [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1565.189713] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c143dd-70fa-4e8d-963e-5ed05e91ece0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.197348] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-703e483b-893e-403f-85eb-5b8c6e087a04 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.210960] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae105b16-98f2-4e79-98a9-dfe44e662ae5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.217733] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ece4ef-5613-4af8-847c-43125246125e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.245808] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180876MB free_disk=185GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1565.245952] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.690708] env[61978]: DEBUG nova.compute.utils [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1565.692149] env[61978]: DEBUG nova.compute.manager [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1565.692544] env[61978]: DEBUG nova.network.neutron [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1565.744273] env[61978]: DEBUG nova.policy [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b0d311a20433495487d3926eb92ce91c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a8f40d19e7c74ade886c322a78583545', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1566.002302] env[61978]: DEBUG nova.network.neutron [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Successfully created port: 1053461c-995c-4bdc-a58c-52c4b5d4d8a5 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1566.198360] env[61978]: DEBUG nova.compute.manager [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1566.259631] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eef4f80d-7c1f-409d-aa9c-971b190872f0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.266811] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40db924e-0835-445e-8671-1ec0533b636f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.295462] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a05dcf2a-c49a-434a-96b4-885a16b9814e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.302147] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46601980-68fd-4dbc-ad52-b9291be6ca7b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.314674] env[61978]: DEBUG nova.compute.provider_tree [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1566.817637] env[61978]: DEBUG nova.scheduler.client.report [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1567.209795] env[61978]: DEBUG nova.compute.manager [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1567.240165] env[61978]: DEBUG nova.virt.hardware [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1567.240479] env[61978]: DEBUG nova.virt.hardware [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1567.240701] env[61978]: DEBUG nova.virt.hardware [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1567.240906] env[61978]: DEBUG nova.virt.hardware [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1567.241072] env[61978]: DEBUG nova.virt.hardware [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1567.241252] env[61978]: DEBUG nova.virt.hardware [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1567.241470] env[61978]: DEBUG nova.virt.hardware [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1567.241634] env[61978]: DEBUG nova.virt.hardware [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
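The nova.virt.hardware records above walk through CPU topology selection for the m1.nano flavor: with no flavor or image preference expressed (limits and prefs all 0:0:0), the limits fall back to 65536 and the only possible topology for 1 vCPU is 1 socket x 1 core x 1 thread. The following is a minimal sketch of that enumeration, assuming a simplified VirtCPUTopology namedtuple rather than the real Nova object; it is not the exact _get_possible_cpu_topologies logic.

    from collections import namedtuple
    from itertools import product

    # Simplified stand-in for nova.objects.VirtCPUTopology (fields only).
    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus: int, maximum: VirtCPUTopology) -> list[VirtCPUTopology]:
        """Enumerate (sockets, cores, threads) combinations whose product equals vcpus.

        A maximum of 0 is treated as "no limit" (65536), matching the
        'limits were sockets=65536, cores=65536, threads=65536' fallback
        seen in the log. Illustrative simplification only.
        """
        limits = VirtCPUTopology(*(m if m else 65536 for m in maximum))
        found = []
        for s, c, t in product(range(1, vcpus + 1), repeat=3):
            if s * c * t == vcpus and s <= limits.sockets and c <= limits.cores and t <= limits.threads:
                found.append(VirtCPUTopology(s, c, t))
        return found

    # For the 1-vCPU flavor above with no preference, the only candidate is
    # VirtCPUTopology(sockets=1, cores=1, threads=1), as logged.
    print(possible_topologies(1, VirtCPUTopology(0, 0, 0)))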
[ 1567.241805] env[61978]: DEBUG nova.virt.hardware [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1567.241973] env[61978]: DEBUG nova.virt.hardware [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1567.242165] env[61978]: DEBUG nova.virt.hardware [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1567.243013] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249408fa-ec26-4335-b6b3-edf7c072236b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.250712] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb135d9d-4111-4db9-a783-ee3120938126 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.323439] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.138s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.323962] env[61978]: DEBUG nova.compute.manager [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Start building networks asynchronously for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1567.326530] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.080s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.379381] env[61978]: DEBUG nova.compute.manager [req-f4de6ab2-7ba1-4d8b-8d92-e1e4e24806a1 req-7994344a-677f-4a57-9fcd-9b5796e746be service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Received event network-vif-plugged-1053461c-995c-4bdc-a58c-52c4b5d4d8a5 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1567.379621] env[61978]: DEBUG oslo_concurrency.lockutils [req-f4de6ab2-7ba1-4d8b-8d92-e1e4e24806a1 req-7994344a-677f-4a57-9fcd-9b5796e746be service nova] Acquiring lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.379861] env[61978]: DEBUG oslo_concurrency.lockutils [req-f4de6ab2-7ba1-4d8b-8d92-e1e4e24806a1 req-7994344a-677f-4a57-9fcd-9b5796e746be service nova] Lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.380055] env[61978]: DEBUG oslo_concurrency.lockutils [req-f4de6ab2-7ba1-4d8b-8d92-e1e4e24806a1 req-7994344a-677f-4a57-9fcd-9b5796e746be service nova] Lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.380238] env[61978]: DEBUG nova.compute.manager [req-f4de6ab2-7ba1-4d8b-8d92-e1e4e24806a1 req-7994344a-677f-4a57-9fcd-9b5796e746be service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] No waiting events found dispatching network-vif-plugged-1053461c-995c-4bdc-a58c-52c4b5d4d8a5 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1567.380411] env[61978]: WARNING nova.compute.manager [req-f4de6ab2-7ba1-4d8b-8d92-e1e4e24806a1 req-7994344a-677f-4a57-9fcd-9b5796e746be service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Received unexpected event network-vif-plugged-1053461c-995c-4bdc-a58c-52c4b5d4d8a5 for instance with vm_state building and task_state spawning. 
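The req-f4de6ab2 records above show how an incoming network-vif-plugged event is dispatched: the manager takes the instance's "-events" lock, pops any registered waiter for that event, and when none is found it logs the "Received unexpected event" warning seen here. The toy sketch below illustrates that pop-or-warn pattern; prepare_for_event and dispatch_external_event are illustrative names, the registry uses a plain threading.Lock rather than oslo_concurrency, and this is not Nova's exact InstanceEvents implementation.

    import logging
    import threading

    LOG = logging.getLogger(__name__)

    class InstanceEvents:
        """Toy registry mapping (instance_uuid, event_name) -> threading.Event."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters: dict[tuple[str, str], threading.Event] = {}

        def prepare_for_event(self, instance_uuid: str, event_name: str) -> threading.Event:
            # Called by the code path that expects the event (e.g. before plugging a VIF).
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def pop_instance_event(self, instance_uuid: str, event_name: str):
            # Returns the registered waiter, or None when nothing was waiting.
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)

    def dispatch_external_event(events: InstanceEvents, instance_uuid: str, event_name: str) -> None:
        """Mimic the pop-or-warn handling of an external instance event."""
        waiter = events.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            LOG.warning("Received unexpected event %s for instance %s", event_name, instance_uuid)
        else:
            waiter.set()  # wake whoever registered via prepare_for_event() and is waiting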
[ 1567.462410] env[61978]: DEBUG nova.network.neutron [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Successfully updated port: 1053461c-995c-4bdc-a58c-52c4b5d4d8a5 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1567.829401] env[61978]: DEBUG nova.compute.utils [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1567.834449] env[61978]: DEBUG nova.compute.manager [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1567.834609] env[61978]: DEBUG nova.network.neutron [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1567.873127] env[61978]: DEBUG nova.policy [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df950ca91cd64479950545608f749fb6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aaa4a0cb1a4c45949b43032fd9395200', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1567.964709] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1567.964880] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquired lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1567.965015] env[61978]: DEBUG nova.network.neutron [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1568.108541] env[61978]: DEBUG nova.network.neutron [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Successfully created port: 
3600e056-00fc-43b0-844b-9e7496d51dfd {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1568.338172] env[61978]: DEBUG nova.compute.manager [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1568.358211] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 733c2f53-04d3-4a8b-a7c1-5194d7961a31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1568.358400] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance d542b9ef-a4d8-4dad-8b97-b9e67372e214 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1568.358557] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance a48d5ef1-b66b-429e-bbff-2351ad5eda32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1568.358720] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1568.358981] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1568.359173] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1568.421134] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090a16e3-1268-4469-8b57-95f91de02e29 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.428577] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e88d83-8639-4b3b-b5d1-e728aca8243c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.458519] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1523a30d-9a62-4084-a281-d6651ef0bb82 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.469667] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29c3b25-e652-4a46-aeea-71ae8bc4b8a0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.483499] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1568.504272] env[61978]: DEBUG nova.network.neutron [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1568.632105] env[61978]: DEBUG nova.network.neutron [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Updating instance_info_cache with network_info: [{"id": "1053461c-995c-4bdc-a58c-52c4b5d4d8a5", "address": "fa:16:3e:ae:0d:68", "network": {"id": "1200e62b-3b3a-40e1-98a2-c33a17573a38", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2039134840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8f40d19e7c74ade886c322a78583545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1053461c-99", "ovs_interfaceid": "1053461c-995c-4bdc-a58c-52c4b5d4d8a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.987036] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1569.134677] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Releasing lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1569.135083] env[61978]: DEBUG nova.compute.manager [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Instance network_info: |[{"id": "1053461c-995c-4bdc-a58c-52c4b5d4d8a5", "address": "fa:16:3e:ae:0d:68", "network": {"id": "1200e62b-3b3a-40e1-98a2-c33a17573a38", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2039134840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8f40d19e7c74ade886c322a78583545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1053461c-99", "ovs_interfaceid": "1053461c-995c-4bdc-a58c-52c4b5d4d8a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1569.135563] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:0d:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1053461c-995c-4bdc-a58c-52c4b5d4d8a5', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1569.143232] env[61978]: DEBUG oslo.service.loopingcall [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1569.143484] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1569.143751] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02763329-3430-4d6b-a2c1-53dbfb75661a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.163339] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1569.163339] env[61978]: value = "task-1396236" [ 1569.163339] env[61978]: _type = "Task" [ 1569.163339] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.170961] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396236, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.348103] env[61978]: DEBUG nova.compute.manager [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1569.375103] env[61978]: DEBUG nova.virt.hardware [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1569.375385] env[61978]: DEBUG nova.virt.hardware [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1569.375552] env[61978]: DEBUG nova.virt.hardware [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1569.375744] env[61978]: DEBUG nova.virt.hardware [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1569.375894] env[61978]: DEBUG nova.virt.hardware [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1569.376055] env[61978]: DEBUG nova.virt.hardware [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1569.376271] env[61978]: DEBUG nova.virt.hardware [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1569.376435] env[61978]: DEBUG nova.virt.hardware [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1569.376607] env[61978]: DEBUG nova.virt.hardware [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 
tempest-ServersTestJSON-1559277588-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1569.376774] env[61978]: DEBUG nova.virt.hardware [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1569.376951] env[61978]: DEBUG nova.virt.hardware [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1569.377812] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ffc2d15-8329-45e2-9f33-f2ae3bf49ef4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.385194] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a4132ba-c6db-47d9-9a47-ba556abb7b41 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.404258] env[61978]: DEBUG nova.compute.manager [req-1edbeb00-8c32-4f9c-9c06-2f43b35ffc08 req-93fbe887-defb-4f9a-a5ca-61f4e75561a8 service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Received event network-changed-1053461c-995c-4bdc-a58c-52c4b5d4d8a5 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1569.404475] env[61978]: DEBUG nova.compute.manager [req-1edbeb00-8c32-4f9c-9c06-2f43b35ffc08 req-93fbe887-defb-4f9a-a5ca-61f4e75561a8 service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Refreshing instance network info cache due to event network-changed-1053461c-995c-4bdc-a58c-52c4b5d4d8a5. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1569.404702] env[61978]: DEBUG oslo_concurrency.lockutils [req-1edbeb00-8c32-4f9c-9c06-2f43b35ffc08 req-93fbe887-defb-4f9a-a5ca-61f4e75561a8 service nova] Acquiring lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.404863] env[61978]: DEBUG oslo_concurrency.lockutils [req-1edbeb00-8c32-4f9c-9c06-2f43b35ffc08 req-93fbe887-defb-4f9a-a5ca-61f4e75561a8 service nova] Acquired lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.405045] env[61978]: DEBUG nova.network.neutron [req-1edbeb00-8c32-4f9c-9c06-2f43b35ffc08 req-93fbe887-defb-4f9a-a5ca-61f4e75561a8 service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Refreshing network info cache for port 1053461c-995c-4bdc-a58c-52c4b5d4d8a5 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1569.491352] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1569.491583] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.165s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.673733] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396236, 'name': CreateVM_Task, 'duration_secs': 0.287096} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.674226] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1569.674959] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.675145] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.675474] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1569.675737] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de6db47e-03ae-4b18-812e-8dbc71fc41a9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.680182] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1569.680182] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f95acb-8f22-aa2e-038f-72ad3eff554a" [ 1569.680182] env[61978]: _type = "Task" [ 1569.680182] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.692021] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f95acb-8f22-aa2e-038f-72ad3eff554a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.776128] env[61978]: DEBUG nova.network.neutron [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Successfully updated port: 3600e056-00fc-43b0-844b-9e7496d51dfd {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1570.097074] env[61978]: DEBUG nova.network.neutron [req-1edbeb00-8c32-4f9c-9c06-2f43b35ffc08 req-93fbe887-defb-4f9a-a5ca-61f4e75561a8 service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Updated VIF entry in instance network info cache for port 1053461c-995c-4bdc-a58c-52c4b5d4d8a5. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1570.097453] env[61978]: DEBUG nova.network.neutron [req-1edbeb00-8c32-4f9c-9c06-2f43b35ffc08 req-93fbe887-defb-4f9a-a5ca-61f4e75561a8 service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Updating instance_info_cache with network_info: [{"id": "1053461c-995c-4bdc-a58c-52c4b5d4d8a5", "address": "fa:16:3e:ae:0d:68", "network": {"id": "1200e62b-3b3a-40e1-98a2-c33a17573a38", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2039134840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8f40d19e7c74ade886c322a78583545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1053461c-99", "ovs_interfaceid": "1053461c-995c-4bdc-a58c-52c4b5d4d8a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1570.190711] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f95acb-8f22-aa2e-038f-72ad3eff554a, 'name': SearchDatastore_Task, 'duration_secs': 0.010431} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.191021] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1570.191305] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1570.191546] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1570.191697] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1570.191883] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1570.192168] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6c65fb5-422d-4b57-850e-2ed65b1aebad {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.199905] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1570.200088] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1570.200746] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13c9a109-4f8f-4831-89b1-18a18cbb5070 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.205844] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1570.205844] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]528f2f1d-1ff2-97e9-066e-010cccd42dfc" [ 1570.205844] env[61978]: _type = "Task" [ 1570.205844] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.212695] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528f2f1d-1ff2-97e9-066e-010cccd42dfc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.279742] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "refresh_cache-6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1570.279742] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired lock "refresh_cache-6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1570.279742] env[61978]: DEBUG nova.network.neutron [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1570.600472] env[61978]: DEBUG oslo_concurrency.lockutils [req-1edbeb00-8c32-4f9c-9c06-2f43b35ffc08 req-93fbe887-defb-4f9a-a5ca-61f4e75561a8 service nova] Releasing lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1570.716435] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528f2f1d-1ff2-97e9-066e-010cccd42dfc, 'name': SearchDatastore_Task, 'duration_secs': 0.007857} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.717252] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e45911ec-2c56-421e-85be-0cfb77f512cf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.722171] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1570.722171] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5220e56d-74d7-bf15-4fce-6d14713af051" [ 1570.722171] env[61978]: _type = "Task" [ 1570.722171] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.729183] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5220e56d-74d7-bf15-4fce-6d14713af051, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.810041] env[61978]: DEBUG nova.network.neutron [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1570.940615] env[61978]: DEBUG nova.network.neutron [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Updating instance_info_cache with network_info: [{"id": "3600e056-00fc-43b0-844b-9e7496d51dfd", "address": "fa:16:3e:72:19:e4", "network": {"id": "a08d803f-f140-462d-a335-21fe5ce33ff5", "bridge": "br-int", "label": "tempest-ServersTestJSON-1636313508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaa4a0cb1a4c45949b43032fd9395200", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3600e056-00", "ovs_interfaceid": "3600e056-00fc-43b0-844b-9e7496d51dfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1571.232364] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': 
session[52499f00-04f1-7b6f-00fd-7545db7737b2]5220e56d-74d7-bf15-4fce-6d14713af051, 'name': SearchDatastore_Task, 'duration_secs': 0.008557} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.232627] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.232891] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] a48d5ef1-b66b-429e-bbff-2351ad5eda32/a48d5ef1-b66b-429e-bbff-2351ad5eda32.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1571.233160] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-19538429-d82a-4adc-9ff5-acf76f3a7e39 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.239874] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1571.239874] env[61978]: value = "task-1396237" [ 1571.239874] env[61978]: _type = "Task" [ 1571.239874] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.246692] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396237, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.441408] env[61978]: DEBUG nova.compute.manager [req-f7782bfc-a5e1-40cc-936c-585cf0dd35de req-4df8986b-bf9c-408e-b76c-466f2ce340f3 service nova] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Received event network-vif-plugged-3600e056-00fc-43b0-844b-9e7496d51dfd {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1571.441689] env[61978]: DEBUG oslo_concurrency.lockutils [req-f7782bfc-a5e1-40cc-936c-585cf0dd35de req-4df8986b-bf9c-408e-b76c-466f2ce340f3 service nova] Acquiring lock "6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.441958] env[61978]: DEBUG oslo_concurrency.lockutils [req-f7782bfc-a5e1-40cc-936c-585cf0dd35de req-4df8986b-bf9c-408e-b76c-466f2ce340f3 service nova] Lock "6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.442124] env[61978]: DEBUG oslo_concurrency.lockutils [req-f7782bfc-a5e1-40cc-936c-585cf0dd35de req-4df8986b-bf9c-408e-b76c-466f2ce340f3 service nova] Lock "6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.442310] env[61978]: DEBUG nova.compute.manager [req-f7782bfc-a5e1-40cc-936c-585cf0dd35de req-4df8986b-bf9c-408e-b76c-466f2ce340f3 service nova] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] No waiting events found dispatching network-vif-plugged-3600e056-00fc-43b0-844b-9e7496d51dfd {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1571.442485] env[61978]: WARNING nova.compute.manager [req-f7782bfc-a5e1-40cc-936c-585cf0dd35de req-4df8986b-bf9c-408e-b76c-466f2ce340f3 service nova] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Received unexpected event network-vif-plugged-3600e056-00fc-43b0-844b-9e7496d51dfd for instance with vm_state building and task_state spawning. [ 1571.442647] env[61978]: DEBUG nova.compute.manager [req-f7782bfc-a5e1-40cc-936c-585cf0dd35de req-4df8986b-bf9c-408e-b76c-466f2ce340f3 service nova] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Received event network-changed-3600e056-00fc-43b0-844b-9e7496d51dfd {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1571.442810] env[61978]: DEBUG nova.compute.manager [req-f7782bfc-a5e1-40cc-936c-585cf0dd35de req-4df8986b-bf9c-408e-b76c-466f2ce340f3 service nova] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Refreshing instance network info cache due to event network-changed-3600e056-00fc-43b0-844b-9e7496d51dfd. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1571.442991] env[61978]: DEBUG oslo_concurrency.lockutils [req-f7782bfc-a5e1-40cc-936c-585cf0dd35de req-4df8986b-bf9c-408e-b76c-466f2ce340f3 service nova] Acquiring lock "refresh_cache-6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1571.443774] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Releasing lock "refresh_cache-6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.444101] env[61978]: DEBUG nova.compute.manager [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Instance network_info: |[{"id": "3600e056-00fc-43b0-844b-9e7496d51dfd", "address": "fa:16:3e:72:19:e4", "network": {"id": "a08d803f-f140-462d-a335-21fe5ce33ff5", "bridge": "br-int", "label": "tempest-ServersTestJSON-1636313508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaa4a0cb1a4c45949b43032fd9395200", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3600e056-00", "ovs_interfaceid": "3600e056-00fc-43b0-844b-9e7496d51dfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1571.444411] env[61978]: DEBUG oslo_concurrency.lockutils [req-f7782bfc-a5e1-40cc-936c-585cf0dd35de req-4df8986b-bf9c-408e-b76c-466f2ce340f3 service nova] Acquired lock "refresh_cache-6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.444590] env[61978]: DEBUG nova.network.neutron [req-f7782bfc-a5e1-40cc-936c-585cf0dd35de req-4df8986b-bf9c-408e-b76c-466f2ce340f3 service nova] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Refreshing network info cache for port 3600e056-00fc-43b0-844b-9e7496d51dfd {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1571.445989] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:19:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb68953b-dee5-4d9d-b47b-277336ba76dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3600e056-00fc-43b0-844b-9e7496d51dfd', 'vif_model': 'vmxnet3'}] 
{{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1571.453921] env[61978]: DEBUG oslo.service.loopingcall [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1571.458746] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1571.459592] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-62bf74ce-0247-4c5a-8b03-366d8311c3d6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.481727] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1571.481727] env[61978]: value = "task-1396238" [ 1571.481727] env[61978]: _type = "Task" [ 1571.481727] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.491420] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396238, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.684757] env[61978]: DEBUG nova.network.neutron [req-f7782bfc-a5e1-40cc-936c-585cf0dd35de req-4df8986b-bf9c-408e-b76c-466f2ce340f3 service nova] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Updated VIF entry in instance network info cache for port 3600e056-00fc-43b0-844b-9e7496d51dfd. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1571.685181] env[61978]: DEBUG nova.network.neutron [req-f7782bfc-a5e1-40cc-936c-585cf0dd35de req-4df8986b-bf9c-408e-b76c-466f2ce340f3 service nova] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Updating instance_info_cache with network_info: [{"id": "3600e056-00fc-43b0-844b-9e7496d51dfd", "address": "fa:16:3e:72:19:e4", "network": {"id": "a08d803f-f140-462d-a335-21fe5ce33ff5", "bridge": "br-int", "label": "tempest-ServersTestJSON-1636313508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaa4a0cb1a4c45949b43032fd9395200", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3600e056-00", "ovs_interfaceid": "3600e056-00fc-43b0-844b-9e7496d51dfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1571.749863] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 
tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396237, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.436401} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.750123] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] a48d5ef1-b66b-429e-bbff-2351ad5eda32/a48d5ef1-b66b-429e-bbff-2351ad5eda32.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1571.750348] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1571.750590] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-73777dd3-e3ae-4667-8b98-1fc0f7e5ee03 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.756327] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1571.756327] env[61978]: value = "task-1396239" [ 1571.756327] env[61978]: _type = "Task" [ 1571.756327] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.763551] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396239, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.991168] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396238, 'name': CreateVM_Task, 'duration_secs': 0.395701} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.991336] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1571.991996] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1571.992187] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.992498] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1571.992747] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ad88f79-e411-4da8-9f96-6d2cf0bd94b1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.996880] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1571.996880] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a48a55-4707-5fce-ed8c-dd84969c361e" [ 1571.996880] env[61978]: _type = "Task" [ 1571.996880] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.004221] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a48a55-4707-5fce-ed8c-dd84969c361e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.188367] env[61978]: DEBUG oslo_concurrency.lockutils [req-f7782bfc-a5e1-40cc-936c-585cf0dd35de req-4df8986b-bf9c-408e-b76c-466f2ce340f3 service nova] Releasing lock "refresh_cache-6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1572.265562] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396239, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062935} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.266824] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1572.266824] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b283ba-f22d-4ebf-be56-9648c37359b9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.287851] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Reconfiguring VM instance instance-0000007b to attach disk [datastore2] a48d5ef1-b66b-429e-bbff-2351ad5eda32/a48d5ef1-b66b-429e-bbff-2351ad5eda32.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1572.288083] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f21f0d7a-07f6-47ba-ac18-61b0bac954bd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.307161] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1572.307161] env[61978]: value = "task-1396240" [ 1572.307161] env[61978]: _type = "Task" [ 1572.307161] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.314579] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396240, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.508118] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52a48a55-4707-5fce-ed8c-dd84969c361e, 'name': SearchDatastore_Task, 'duration_secs': 0.008992} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.508394] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1572.508639] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1572.508943] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1572.509118] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1572.509320] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1572.509603] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-01aa021e-b2ed-4ef1-ba88-c0e9f52f9d6d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.517335] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1572.517516] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1572.518197] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-477f00ac-0e6a-47c7-992d-c50a2e773e35 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.523343] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1572.523343] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d33d97-b35c-033a-9601-d0680aca9941" [ 1572.523343] env[61978]: _type = "Task" [ 1572.523343] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.531095] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d33d97-b35c-033a-9601-d0680aca9941, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.816993] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396240, 'name': ReconfigVM_Task, 'duration_secs': 0.276939} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.817311] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Reconfigured VM instance instance-0000007b to attach disk [datastore2] a48d5ef1-b66b-429e-bbff-2351ad5eda32/a48d5ef1-b66b-429e-bbff-2351ad5eda32.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1572.818063] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c25d0e2e-833e-43a7-9192-23994a92dae3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.824099] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1572.824099] env[61978]: value = "task-1396241" [ 1572.824099] env[61978]: _type = "Task" [ 1572.824099] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.831315] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396241, 'name': Rename_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.035721] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52d33d97-b35c-033a-9601-d0680aca9941, 'name': SearchDatastore_Task, 'duration_secs': 0.007978} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.036531] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35ea33e0-1dfc-4c47-b99a-e0a2501ae108 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.041289] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1573.041289] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f4bade-df2c-9e2f-2fcc-c22a72926f5d" [ 1573.041289] env[61978]: _type = "Task" [ 1573.041289] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.049595] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f4bade-df2c-9e2f-2fcc-c22a72926f5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.334135] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396241, 'name': Rename_Task, 'duration_secs': 0.138286} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.334411] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1573.334661] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8a66ffed-0a3d-418e-a993-d3ae11f23abc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.340390] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1573.340390] env[61978]: value = "task-1396242" [ 1573.340390] env[61978]: _type = "Task" [ 1573.340390] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.347255] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396242, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.551592] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52f4bade-df2c-9e2f-2fcc-c22a72926f5d, 'name': SearchDatastore_Task, 'duration_secs': 0.06135} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.551840] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1573.552111] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa/6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1573.552371] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1949f21c-56e5-4763-ab86-f18967f997d9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.558652] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1573.558652] env[61978]: value = "task-1396243" [ 1573.558652] env[61978]: _type = "Task" [ 1573.558652] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.565679] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396243, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.851027] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396242, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.068943] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396243, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.351100] env[61978]: DEBUG oslo_vmware.api [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396242, 'name': PowerOnVM_Task, 'duration_secs': 0.591224} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.351393] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1574.351624] env[61978]: INFO nova.compute.manager [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Took 7.14 seconds to spawn the instance on the hypervisor. [ 1574.351814] env[61978]: DEBUG nova.compute.manager [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1574.352570] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-336bc7c4-6488-426f-9399-966e98a38d71 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.569945] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396243, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.561368} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.570266] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa/6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1574.570601] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1574.570870] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eaf28b56-43b5-43a4-92cc-8de83d46e1fc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.576477] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1574.576477] env[61978]: value = "task-1396244" [ 1574.576477] env[61978]: _type = "Task" [ 1574.576477] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.584335] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396244, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.871780] env[61978]: INFO nova.compute.manager [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Took 11.84 seconds to build instance. [ 1575.010027] env[61978]: DEBUG nova.compute.manager [req-bfbf57fc-29bf-4633-b4f3-b6f9567ee6ec req-5d61571a-fb50-4728-96d6-94adf7860c4c service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Received event network-changed-1053461c-995c-4bdc-a58c-52c4b5d4d8a5 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1575.010273] env[61978]: DEBUG nova.compute.manager [req-bfbf57fc-29bf-4633-b4f3-b6f9567ee6ec req-5d61571a-fb50-4728-96d6-94adf7860c4c service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Refreshing instance network info cache due to event network-changed-1053461c-995c-4bdc-a58c-52c4b5d4d8a5. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1575.010446] env[61978]: DEBUG oslo_concurrency.lockutils [req-bfbf57fc-29bf-4633-b4f3-b6f9567ee6ec req-5d61571a-fb50-4728-96d6-94adf7860c4c service nova] Acquiring lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1575.010601] env[61978]: DEBUG oslo_concurrency.lockutils [req-bfbf57fc-29bf-4633-b4f3-b6f9567ee6ec req-5d61571a-fb50-4728-96d6-94adf7860c4c service nova] Acquired lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.010769] env[61978]: DEBUG nova.network.neutron [req-bfbf57fc-29bf-4633-b4f3-b6f9567ee6ec req-5d61571a-fb50-4728-96d6-94adf7860c4c service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Refreshing network info cache for port 1053461c-995c-4bdc-a58c-52c4b5d4d8a5 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1575.087341] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396244, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059321} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.087657] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1575.088499] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-773ff720-dba9-4db5-846e-9fd2efe95aa4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.113850] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa/6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1575.114216] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19f77417-7f57-4186-a94d-ff999cc44aec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.135920] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1575.135920] env[61978]: value = "task-1396245" [ 1575.135920] env[61978]: _type = "Task" [ 1575.135920] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.144389] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396245, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.373755] env[61978]: DEBUG oslo_concurrency.lockutils [None req-bc465eda-13b7-4709-8e4d-a60f8566bb4c tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.349s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.646024] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396245, 'name': ReconfigVM_Task, 'duration_secs': 0.299581} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.646318] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Reconfigured VM instance instance-0000007c to attach disk [datastore2] 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa/6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1575.646955] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-47d6e3c6-8a09-4c7f-b3fd-34785633c9d6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.652875] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1575.652875] env[61978]: value = "task-1396246" [ 1575.652875] env[61978]: _type = "Task" [ 1575.652875] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.662697] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396246, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.727876] env[61978]: DEBUG nova.network.neutron [req-bfbf57fc-29bf-4633-b4f3-b6f9567ee6ec req-5d61571a-fb50-4728-96d6-94adf7860c4c service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Updated VIF entry in instance network info cache for port 1053461c-995c-4bdc-a58c-52c4b5d4d8a5. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1575.728327] env[61978]: DEBUG nova.network.neutron [req-bfbf57fc-29bf-4633-b4f3-b6f9567ee6ec req-5d61571a-fb50-4728-96d6-94adf7860c4c service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Updating instance_info_cache with network_info: [{"id": "1053461c-995c-4bdc-a58c-52c4b5d4d8a5", "address": "fa:16:3e:ae:0d:68", "network": {"id": "1200e62b-3b3a-40e1-98a2-c33a17573a38", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2039134840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8f40d19e7c74ade886c322a78583545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1053461c-99", "ovs_interfaceid": "1053461c-995c-4bdc-a58c-52c4b5d4d8a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.162778] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396246, 'name': Rename_Task, 'duration_secs': 0.130735} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.163123] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1576.163336] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f88a2e6a-6650-49eb-a367-a6f968a3a83d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.168909] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1576.168909] env[61978]: value = "task-1396247" [ 1576.168909] env[61978]: _type = "Task" [ 1576.168909] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.176179] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396247, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.231013] env[61978]: DEBUG oslo_concurrency.lockutils [req-bfbf57fc-29bf-4633-b4f3-b6f9567ee6ec req-5d61571a-fb50-4728-96d6-94adf7860c4c service nova] Releasing lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1576.680068] env[61978]: DEBUG oslo_vmware.api [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396247, 'name': PowerOnVM_Task, 'duration_secs': 0.418829} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.680068] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1576.680068] env[61978]: INFO nova.compute.manager [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Took 7.33 seconds to spawn the instance on the hypervisor. [ 1576.680068] env[61978]: DEBUG nova.compute.manager [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1576.680666] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75cf220c-cac4-4026-b2f2-46ddc1f45fc8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.196064] env[61978]: INFO nova.compute.manager [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Took 13.38 seconds to build instance. 
[ 1577.698353] env[61978]: DEBUG oslo_concurrency.lockutils [None req-1f269fb7-28eb-4835-af45-faecb56febea tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.888s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1578.301420] env[61978]: DEBUG oslo_concurrency.lockutils [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1578.301808] env[61978]: DEBUG oslo_concurrency.lockutils [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1578.301853] env[61978]: DEBUG oslo_concurrency.lockutils [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1578.302050] env[61978]: DEBUG oslo_concurrency.lockutils [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1578.302234] env[61978]: DEBUG oslo_concurrency.lockutils [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1578.304458] env[61978]: INFO nova.compute.manager [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Terminating instance [ 1578.306285] env[61978]: DEBUG nova.compute.manager [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1578.306485] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1578.307350] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a281c2f4-9ab8-4c85-9bf9-fa39c43f7bf7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.314870] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1578.315112] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-44a99e65-ef54-40c4-b871-f0ce957c5232 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.323581] env[61978]: DEBUG oslo_vmware.api [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1578.323581] env[61978]: value = "task-1396248" [ 1578.323581] env[61978]: _type = "Task" [ 1578.323581] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.332427] env[61978]: DEBUG oslo_vmware.api [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396248, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.833508] env[61978]: DEBUG oslo_vmware.api [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396248, 'name': PowerOffVM_Task, 'duration_secs': 0.183718} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.833782] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1578.833961] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1578.834229] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7275e48b-859a-4066-8dcb-18a89853927e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.892101] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1578.892349] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1578.892543] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Deleting the datastore file [datastore2] 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1578.892808] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1adc12f7-ab71-45ab-b30e-47ff9d633eba {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.898814] env[61978]: DEBUG oslo_vmware.api [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1578.898814] env[61978]: value = "task-1396250" [ 1578.898814] env[61978]: _type = "Task" [ 1578.898814] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.907012] env[61978]: DEBUG oslo_vmware.api [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396250, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.408628] env[61978]: DEBUG oslo_vmware.api [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396250, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153601} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.408996] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1579.409129] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1579.409326] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1579.409571] env[61978]: INFO nova.compute.manager [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1579.409848] env[61978]: DEBUG oslo.service.loopingcall [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1579.410076] env[61978]: DEBUG nova.compute.manager [-] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1579.410176] env[61978]: DEBUG nova.network.neutron [-] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1579.671249] env[61978]: DEBUG nova.compute.manager [req-df3c5af2-5df1-434c-9dc9-00a009b599d0 req-9b3ce56e-a66a-4c5d-bd16-cf8aa3230420 service nova] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Received event network-vif-deleted-3600e056-00fc-43b0-844b-9e7496d51dfd {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1579.671249] env[61978]: INFO nova.compute.manager [req-df3c5af2-5df1-434c-9dc9-00a009b599d0 req-9b3ce56e-a66a-4c5d-bd16-cf8aa3230420 service nova] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Neutron deleted interface 3600e056-00fc-43b0-844b-9e7496d51dfd; detaching it from the instance and deleting it from the info cache [ 1579.671249] env[61978]: DEBUG nova.network.neutron [req-df3c5af2-5df1-434c-9dc9-00a009b599d0 req-9b3ce56e-a66a-4c5d-bd16-cf8aa3230420 service nova] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1580.143019] env[61978]: DEBUG nova.network.neutron [-] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} 
[ 1580.174481] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b5c97d23-2772-442c-96b4-69a08f30a558 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.184585] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f958e9ac-fdc8-4827-8189-6ca0eb4b2fda {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.210122] env[61978]: DEBUG nova.compute.manager [req-df3c5af2-5df1-434c-9dc9-00a009b599d0 req-9b3ce56e-a66a-4c5d-bd16-cf8aa3230420 service nova] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Detach interface failed, port_id=3600e056-00fc-43b0-844b-9e7496d51dfd, reason: Instance 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1580.646027] env[61978]: INFO nova.compute.manager [-] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Took 1.24 seconds to deallocate network for instance. [ 1581.152648] env[61978]: DEBUG oslo_concurrency.lockutils [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1581.153034] env[61978]: DEBUG oslo_concurrency.lockutils [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1581.153275] env[61978]: DEBUG nova.objects.instance [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lazy-loading 'resources' on Instance uuid 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1581.721233] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e6b50b-c822-42d2-9641-38c8629cdda3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.729498] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9373d18f-ae8f-4989-8c88-72134f322cc6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.760791] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b0df2a4-c0bf-42c3-bc8d-e1a80ba369db {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.768695] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f36f9b98-18a5-492e-9165-2de90e83d18d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.782090] env[61978]: DEBUG nova.compute.provider_tree [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 
tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1582.285734] env[61978]: DEBUG nova.scheduler.client.report [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1582.791058] env[61978]: DEBUG oslo_concurrency.lockutils [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.638s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1582.813142] env[61978]: INFO nova.scheduler.client.report [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Deleted allocations for instance 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa [ 1583.321262] env[61978]: DEBUG oslo_concurrency.lockutils [None req-89c9bdb2-9bbb-4d32-8969-dcaca1803c5e tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.019s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.087055] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "03bd4306-195f-44f8-a35c-32869baff416" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.087330] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "03bd4306-195f-44f8-a35c-32869baff416" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1584.589614] env[61978]: DEBUG nova.compute.manager [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1585.113046] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.113046] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.113908] env[61978]: INFO nova.compute.claims [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1586.176346] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf2ffdb5-be5d-4948-9e45-232f31dd2600 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.183870] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf2ed834-800e-4708-9afb-d49a324dfcee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.211845] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b85ad4f-9746-4dbe-91d0-06a3adf64615 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.218245] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6b5a47-7858-466b-a1b5-f2de1908cb4b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.230502] env[61978]: DEBUG nova.compute.provider_tree [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1586.733616] env[61978]: DEBUG nova.scheduler.client.report [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1587.241816] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 
tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.129s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1587.242442] env[61978]: DEBUG nova.compute.manager [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1587.747133] env[61978]: DEBUG nova.compute.utils [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1587.748988] env[61978]: DEBUG nova.compute.manager [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1587.749171] env[61978]: DEBUG nova.network.neutron [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1587.796984] env[61978]: DEBUG nova.policy [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df950ca91cd64479950545608f749fb6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aaa4a0cb1a4c45949b43032fd9395200', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1588.037841] env[61978]: DEBUG nova.network.neutron [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Successfully created port: 6ce2795b-868b-4ae7-a2df-df5b7137aa42 {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1588.253393] env[61978]: DEBUG nova.compute.manager [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Start building block device mappings for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1589.265570] env[61978]: DEBUG nova.compute.manager [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Start spawning the instance on the hypervisor. 
{{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1589.291301] env[61978]: DEBUG nova.virt.hardware [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1589.291553] env[61978]: DEBUG nova.virt.hardware [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1589.291712] env[61978]: DEBUG nova.virt.hardware [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1589.291894] env[61978]: DEBUG nova.virt.hardware [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1589.293024] env[61978]: DEBUG nova.virt.hardware [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1589.293024] env[61978]: DEBUG nova.virt.hardware [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1589.293024] env[61978]: DEBUG nova.virt.hardware [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1589.293024] env[61978]: DEBUG nova.virt.hardware [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1589.293024] env[61978]: DEBUG nova.virt.hardware [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 
tempest-ServersTestJSON-1559277588-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1589.293024] env[61978]: DEBUG nova.virt.hardware [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1589.293271] env[61978]: DEBUG nova.virt.hardware [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1589.294046] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5755fa0-f3e8-43d4-bf7b-0e9c77811c0c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.301793] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb05add-d3ee-405f-848b-7558838ec6b3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.407165] env[61978]: DEBUG nova.compute.manager [req-702fd796-c099-4e48-862e-cca5dab19ef8 req-c292f493-8825-47e2-9570-33df5295f753 service nova] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Received event network-vif-plugged-6ce2795b-868b-4ae7-a2df-df5b7137aa42 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1589.407403] env[61978]: DEBUG oslo_concurrency.lockutils [req-702fd796-c099-4e48-862e-cca5dab19ef8 req-c292f493-8825-47e2-9570-33df5295f753 service nova] Acquiring lock "03bd4306-195f-44f8-a35c-32869baff416-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1589.407617] env[61978]: DEBUG oslo_concurrency.lockutils [req-702fd796-c099-4e48-862e-cca5dab19ef8 req-c292f493-8825-47e2-9570-33df5295f753 service nova] Lock "03bd4306-195f-44f8-a35c-32869baff416-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1589.407816] env[61978]: DEBUG oslo_concurrency.lockutils [req-702fd796-c099-4e48-862e-cca5dab19ef8 req-c292f493-8825-47e2-9570-33df5295f753 service nova] Lock "03bd4306-195f-44f8-a35c-32869baff416-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1589.407936] env[61978]: DEBUG nova.compute.manager [req-702fd796-c099-4e48-862e-cca5dab19ef8 req-c292f493-8825-47e2-9570-33df5295f753 service nova] [instance: 03bd4306-195f-44f8-a35c-32869baff416] No waiting events found dispatching network-vif-plugged-6ce2795b-868b-4ae7-a2df-df5b7137aa42 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1589.408120] env[61978]: WARNING nova.compute.manager [req-702fd796-c099-4e48-862e-cca5dab19ef8 req-c292f493-8825-47e2-9570-33df5295f753 service nova] [instance: 03bd4306-195f-44f8-a35c-32869baff416] 
Received unexpected event network-vif-plugged-6ce2795b-868b-4ae7-a2df-df5b7137aa42 for instance with vm_state building and task_state spawning. [ 1589.490323] env[61978]: DEBUG nova.network.neutron [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Successfully updated port: 6ce2795b-868b-4ae7-a2df-df5b7137aa42 {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1589.993501] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "refresh_cache-03bd4306-195f-44f8-a35c-32869baff416" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1589.993607] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired lock "refresh_cache-03bd4306-195f-44f8-a35c-32869baff416" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1589.993771] env[61978]: DEBUG nova.network.neutron [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1590.528407] env[61978]: DEBUG nova.network.neutron [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1590.658757] env[61978]: DEBUG nova.network.neutron [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Updating instance_info_cache with network_info: [{"id": "6ce2795b-868b-4ae7-a2df-df5b7137aa42", "address": "fa:16:3e:e5:6f:25", "network": {"id": "a08d803f-f140-462d-a335-21fe5ce33ff5", "bridge": "br-int", "label": "tempest-ServersTestJSON-1636313508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaa4a0cb1a4c45949b43032fd9395200", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ce2795b-86", "ovs_interfaceid": "6ce2795b-868b-4ae7-a2df-df5b7137aa42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1591.161598] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Releasing lock "refresh_cache-03bd4306-195f-44f8-a35c-32869baff416" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1591.161976] env[61978]: DEBUG nova.compute.manager [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Instance network_info: |[{"id": "6ce2795b-868b-4ae7-a2df-df5b7137aa42", "address": "fa:16:3e:e5:6f:25", "network": {"id": "a08d803f-f140-462d-a335-21fe5ce33ff5", "bridge": "br-int", "label": "tempest-ServersTestJSON-1636313508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaa4a0cb1a4c45949b43032fd9395200", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ce2795b-86", "ovs_interfaceid": "6ce2795b-868b-4ae7-a2df-df5b7137aa42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1591.162472] env[61978]: 
DEBUG nova.virt.vmwareapi.vmops [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:6f:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb68953b-dee5-4d9d-b47b-277336ba76dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ce2795b-868b-4ae7-a2df-df5b7137aa42', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1591.169807] env[61978]: DEBUG oslo.service.loopingcall [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1591.170062] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1591.170356] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-daa79f61-d761-4c44-82a2-441ce537a330 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.190097] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1591.190097] env[61978]: value = "task-1396251" [ 1591.190097] env[61978]: _type = "Task" [ 1591.190097] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.197283] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396251, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.433916] env[61978]: DEBUG nova.compute.manager [req-9c41ad14-9efb-4a69-9e4b-eb179ce946ef req-726a5a29-110d-430b-8047-d75d01e60760 service nova] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Received event network-changed-6ce2795b-868b-4ae7-a2df-df5b7137aa42 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1591.434221] env[61978]: DEBUG nova.compute.manager [req-9c41ad14-9efb-4a69-9e4b-eb179ce946ef req-726a5a29-110d-430b-8047-d75d01e60760 service nova] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Refreshing instance network info cache due to event network-changed-6ce2795b-868b-4ae7-a2df-df5b7137aa42. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1591.434338] env[61978]: DEBUG oslo_concurrency.lockutils [req-9c41ad14-9efb-4a69-9e4b-eb179ce946ef req-726a5a29-110d-430b-8047-d75d01e60760 service nova] Acquiring lock "refresh_cache-03bd4306-195f-44f8-a35c-32869baff416" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1591.434490] env[61978]: DEBUG oslo_concurrency.lockutils [req-9c41ad14-9efb-4a69-9e4b-eb179ce946ef req-726a5a29-110d-430b-8047-d75d01e60760 service nova] Acquired lock "refresh_cache-03bd4306-195f-44f8-a35c-32869baff416" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1591.434692] env[61978]: DEBUG nova.network.neutron [req-9c41ad14-9efb-4a69-9e4b-eb179ce946ef req-726a5a29-110d-430b-8047-d75d01e60760 service nova] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Refreshing network info cache for port 6ce2795b-868b-4ae7-a2df-df5b7137aa42 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1591.700440] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396251, 'name': CreateVM_Task, 'duration_secs': 0.280956} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.700855] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1591.701288] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1591.701458] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1591.701796] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1591.702351] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55c16e81-8075-4d8a-8939-397efc87f3e8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.706860] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1591.706860] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5254fe1c-d60c-9e09-65de-88cdce28ed17" [ 1591.706860] env[61978]: _type = "Task" [ 1591.706860] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.713932] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5254fe1c-d60c-9e09-65de-88cdce28ed17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.115376] env[61978]: DEBUG nova.network.neutron [req-9c41ad14-9efb-4a69-9e4b-eb179ce946ef req-726a5a29-110d-430b-8047-d75d01e60760 service nova] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Updated VIF entry in instance network info cache for port 6ce2795b-868b-4ae7-a2df-df5b7137aa42. {{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1592.115712] env[61978]: DEBUG nova.network.neutron [req-9c41ad14-9efb-4a69-9e4b-eb179ce946ef req-726a5a29-110d-430b-8047-d75d01e60760 service nova] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Updating instance_info_cache with network_info: [{"id": "6ce2795b-868b-4ae7-a2df-df5b7137aa42", "address": "fa:16:3e:e5:6f:25", "network": {"id": "a08d803f-f140-462d-a335-21fe5ce33ff5", "bridge": "br-int", "label": "tempest-ServersTestJSON-1636313508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaa4a0cb1a4c45949b43032fd9395200", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ce2795b-86", "ovs_interfaceid": "6ce2795b-868b-4ae7-a2df-df5b7137aa42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1592.216974] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5254fe1c-d60c-9e09-65de-88cdce28ed17, 'name': SearchDatastore_Task, 'duration_secs': 0.010457} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.217280] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1592.217519] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1592.217760] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1592.217910] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1592.218108] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1592.218368] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7283de9-1497-4205-b564-e7167c79a38e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.225857] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1592.226083] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1592.226759] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63c72a9f-74cd-4cb4-8729-88fa32dba3ea {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.231279] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1592.231279] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52399756-a339-4704-d4e4-52ea10eb47b1" [ 1592.231279] env[61978]: _type = "Task" [ 1592.231279] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.237960] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52399756-a339-4704-d4e4-52ea10eb47b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.618714] env[61978]: DEBUG oslo_concurrency.lockutils [req-9c41ad14-9efb-4a69-9e4b-eb179ce946ef req-726a5a29-110d-430b-8047-d75d01e60760 service nova] Releasing lock "refresh_cache-03bd4306-195f-44f8-a35c-32869baff416" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1592.740982] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52399756-a339-4704-d4e4-52ea10eb47b1, 'name': SearchDatastore_Task, 'duration_secs': 0.007611} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.741783] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16b8f3d1-7a67-49e4-87d0-ae63ea661430 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.746784] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1592.746784] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5256c94b-f2ff-7431-17d4-a7e08c2e214b" [ 1592.746784] env[61978]: _type = "Task" [ 1592.746784] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.754181] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5256c94b-f2ff-7431-17d4-a7e08c2e214b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.256832] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5256c94b-f2ff-7431-17d4-a7e08c2e214b, 'name': SearchDatastore_Task, 'duration_secs': 0.009698} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.257184] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1593.257423] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 03bd4306-195f-44f8-a35c-32869baff416/03bd4306-195f-44f8-a35c-32869baff416.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1593.257709] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-21d2097b-12b8-4950-b700-f634eee79528 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.264966] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1593.264966] env[61978]: value = "task-1396252" [ 1593.264966] env[61978]: _type = "Task" [ 1593.264966] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.273360] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396252, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.775655] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396252, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.451342} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.776051] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] 03bd4306-195f-44f8-a35c-32869baff416/03bd4306-195f-44f8-a35c-32869baff416.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1593.776277] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1593.776568] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-10611daa-0634-47ae-bf34-2365bb17df2b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.783445] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1593.783445] env[61978]: value = "task-1396253" [ 1593.783445] env[61978]: _type = "Task" [ 1593.783445] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.792348] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396253, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.293840] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396253, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.055449} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.294137] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1594.294913] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e34ba8-addb-4ab7-9669-05caec18642f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.317864] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] 03bd4306-195f-44f8-a35c-32869baff416/03bd4306-195f-44f8-a35c-32869baff416.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1594.318106] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-035d6ce8-c8d0-43eb-99b3-f3091de3cd57 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.337650] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1594.337650] env[61978]: value = "task-1396254" [ 1594.337650] env[61978]: _type = "Task" [ 1594.337650] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.344801] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396254, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.847612] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396254, 'name': ReconfigVM_Task, 'duration_secs': 0.279154} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.848136] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Reconfigured VM instance instance-0000007d to attach disk [datastore2] 03bd4306-195f-44f8-a35c-32869baff416/03bd4306-195f-44f8-a35c-32869baff416.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1594.848522] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-49795169-40d3-480c-89bb-cc28b44b1a36 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.854652] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1594.854652] env[61978]: value = "task-1396255" [ 1594.854652] env[61978]: _type = "Task" [ 1594.854652] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.861832] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396255, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.364377] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396255, 'name': Rename_Task, 'duration_secs': 0.167994} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.364650] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1595.364888] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dbdee0e3-5aca-4941-8b44-798225787e3a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.370853] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1595.370853] env[61978]: value = "task-1396256" [ 1595.370853] env[61978]: _type = "Task" [ 1595.370853] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.377685] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396256, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.880523] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396256, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.382075] env[61978]: DEBUG oslo_vmware.api [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396256, 'name': PowerOnVM_Task, 'duration_secs': 0.627362} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.382075] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1596.382075] env[61978]: INFO nova.compute.manager [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Took 7.12 seconds to spawn the instance on the hypervisor. [ 1596.382075] env[61978]: DEBUG nova.compute.manager [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1596.382636] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45408fa0-8fe0-4044-8da5-ba4e1fcafd07 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.899548] env[61978]: INFO nova.compute.manager [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Took 11.80 seconds to build instance. 
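The spawn traced above is a chain of vCenter tasks (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), each started through oslo.vmware and then polled by wait_for_task / _poll_task until it reports success, which is what produces the repeated "progress is N%" and "completed successfully" entries. A minimal, self-contained sketch of that request/poll pattern is below; the vCenter host, credentials and managed-object id are placeholders rather than values from this deployment, and the snippet only illustrates the oslo.vmware calls involved, it is not the Nova driver code itself.

# Sketch only: drive one vCenter task with oslo.vmware and wait for it,
# mirroring the wait_for_task / _poll_task loop seen in the log above.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Placeholder endpoint and credentials (not from this log).
session = vmware_api.VMwareAPISession(
    'vc1.example.test',   # vCenter host
    'user', 'secret',     # username / password
    10,                   # api_retry_count
    0.5)                  # task_poll_interval (seconds)

# Placeholder managed-object id for an existing VM.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Start PowerOnVM_Task, then block until the task completes;
# wait_for_task polls the task and raises if it ends in error.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)

Because wait_for_task raises on a failed task, a CopyVirtualDisk_Task or PowerOnVM_Task error would surface as an exception in the caller instead of the "completed successfully" entries seen here.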
[ 1597.370764] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5654a7c8-b582-43f4-9b73-99e0f9ce1204 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "03bd4306-195f-44f8-a35c-32869baff416" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1597.401906] env[61978]: DEBUG oslo_concurrency.lockutils [None req-c8e91b31-88bd-46db-8245-e71b4abfb07f tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "03bd4306-195f-44f8-a35c-32869baff416" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.314s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1597.402198] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5654a7c8-b582-43f4-9b73-99e0f9ce1204 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "03bd4306-195f-44f8-a35c-32869baff416" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.032s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1597.402395] env[61978]: DEBUG nova.compute.manager [None req-5654a7c8-b582-43f4-9b73-99e0f9ce1204 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1597.403295] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc84b00-7096-42b7-b431-179cb19ead71 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.409923] env[61978]: DEBUG nova.compute.manager [None req-5654a7c8-b582-43f4-9b73-99e0f9ce1204 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61978) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1597.410476] env[61978]: DEBUG nova.objects.instance [None req-5654a7c8-b582-43f4-9b73-99e0f9ce1204 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lazy-loading 'flavor' on Instance uuid 03bd4306-195f-44f8-a35c-32869baff416 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1597.915049] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-5654a7c8-b582-43f4-9b73-99e0f9ce1204 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1597.915417] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b19370a-7a54-4a5f-b49b-c60501d3e412 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.922721] env[61978]: DEBUG oslo_vmware.api [None req-5654a7c8-b582-43f4-9b73-99e0f9ce1204 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: 
(returnval){ [ 1597.922721] env[61978]: value = "task-1396257" [ 1597.922721] env[61978]: _type = "Task" [ 1597.922721] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.930826] env[61978]: DEBUG oslo_vmware.api [None req-5654a7c8-b582-43f4-9b73-99e0f9ce1204 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396257, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.434049] env[61978]: DEBUG oslo_vmware.api [None req-5654a7c8-b582-43f4-9b73-99e0f9ce1204 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396257, 'name': PowerOffVM_Task, 'duration_secs': 0.207189} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.434338] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-5654a7c8-b582-43f4-9b73-99e0f9ce1204 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1598.434529] env[61978]: DEBUG nova.compute.manager [None req-5654a7c8-b582-43f4-9b73-99e0f9ce1204 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1598.435302] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bebb883-2ea9-486f-99e7-f36119f1762e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.946060] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5654a7c8-b582-43f4-9b73-99e0f9ce1204 tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "03bd4306-195f-44f8-a35c-32869baff416" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.544s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1599.937676] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "03bd4306-195f-44f8-a35c-32869baff416" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1599.937945] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "03bd4306-195f-44f8-a35c-32869baff416" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1599.938613] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "03bd4306-195f-44f8-a35c-32869baff416-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1599.938850] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "03bd4306-195f-44f8-a35c-32869baff416-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1599.939105] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "03bd4306-195f-44f8-a35c-32869baff416-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1599.941273] env[61978]: INFO nova.compute.manager [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Terminating instance [ 1599.942845] env[61978]: DEBUG nova.compute.manager [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1599.943056] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1599.943856] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6a3424-a01e-4507-836a-61e3f6d3e8c7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.951302] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1599.951572] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5bd7a554-1f9c-4d91-8b1d-d5945b472b2d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.017248] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1600.017476] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Deleting contents of the VM 
from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1600.017615] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Deleting the datastore file [datastore2] 03bd4306-195f-44f8-a35c-32869baff416 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1600.017881] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-efa97210-8f5c-4423-8801-1a6493a7c117 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.023380] env[61978]: DEBUG oslo_vmware.api [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1600.023380] env[61978]: value = "task-1396259" [ 1600.023380] env[61978]: _type = "Task" [ 1600.023380] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.030884] env[61978]: DEBUG oslo_vmware.api [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396259, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.533469] env[61978]: DEBUG oslo_vmware.api [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396259, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129748} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.533678] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1600.533849] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1600.534041] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1600.534273] env[61978]: INFO nova.compute.manager [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Took 0.59 seconds to destroy the instance on the hypervisor. 
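The stop and terminate requests in this stretch are serialized per instance by oslo.concurrency locks: the "Acquiring lock", "acquired" and '"released"' DEBUG lines all come from the lockutils wrapper at lockutils.py:402/407/421. A minimal sketch of that locking pattern follows; the lock names are copied from the log purely for illustration and the decorated function is a placeholder, not Nova's implementation.

# Sketch only: the two lockutils idioms that emit the acquire/release
# DEBUG lines seen above (decorator and context-manager forms).
from oslo_concurrency import lockutils

# Nova-style prefixed synchronized decorator.
synchronized = lockutils.synchronized_with_prefix('nova-')

@synchronized('03bd4306-195f-44f8-a35c-32869baff416')
def do_stop_instance():
    # Runs with the per-instance lock held; placeholder body.
    pass

# Equivalent context-manager form, e.g. around a network info cache refresh.
with lockutils.lock('refresh_cache-03bd4306-195f-44f8-a35c-32869baff416'):
    pass

The wrapped callable's qualified name is included in each acquire/release line, which is why the entries above attribute the lock to helpers such as ComputeManager.stop_instance's do_stop_instance.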
[ 1600.534472] env[61978]: DEBUG oslo.service.loopingcall [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1600.534666] env[61978]: DEBUG nova.compute.manager [-] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1600.534760] env[61978]: DEBUG nova.network.neutron [-] [instance: 03bd4306-195f-44f8-a35c-32869baff416] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1600.803538] env[61978]: DEBUG nova.compute.manager [req-96a7a80a-26cf-4401-a540-6e494e31763d req-6603ae3b-4b6d-49dd-befb-9db9d95414c6 service nova] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Received event network-vif-deleted-6ce2795b-868b-4ae7-a2df-df5b7137aa42 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1600.803717] env[61978]: INFO nova.compute.manager [req-96a7a80a-26cf-4401-a540-6e494e31763d req-6603ae3b-4b6d-49dd-befb-9db9d95414c6 service nova] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Neutron deleted interface 6ce2795b-868b-4ae7-a2df-df5b7137aa42; detaching it from the instance and deleting it from the info cache [ 1600.803906] env[61978]: DEBUG nova.network.neutron [req-96a7a80a-26cf-4401-a540-6e494e31763d req-6603ae3b-4b6d-49dd-befb-9db9d95414c6 service nova] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1600.891147] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c0c3c94-339c-4dbc-a35e-93ad07b3087e tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "d542b9ef-a4d8-4dad-8b97-b9e67372e214" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1600.891456] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c0c3c94-339c-4dbc-a35e-93ad07b3087e tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "d542b9ef-a4d8-4dad-8b97-b9e67372e214" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.281891] env[61978]: DEBUG nova.network.neutron [-] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1601.305958] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-db7b3df4-6daf-4c46-b97a-7cc6d6b07187 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.316912] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a507dc-b415-441c-aa25-e0677309d5eb {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.342945] env[61978]: DEBUG nova.compute.manager [req-96a7a80a-26cf-4401-a540-6e494e31763d req-6603ae3b-4b6d-49dd-befb-9db9d95414c6 service nova] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Detach interface failed, port_id=6ce2795b-868b-4ae7-a2df-df5b7137aa42, reason: Instance 03bd4306-195f-44f8-a35c-32869baff416 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1601.394729] env[61978]: DEBUG nova.compute.utils [None req-6c0c3c94-339c-4dbc-a35e-93ad07b3087e tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1601.784569] env[61978]: INFO nova.compute.manager [-] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Took 1.25 seconds to deallocate network for instance. [ 1601.897580] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c0c3c94-339c-4dbc-a35e-93ad07b3087e tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "d542b9ef-a4d8-4dad-8b97-b9e67372e214" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1602.291369] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.291663] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.291890] env[61978]: DEBUG nova.objects.instance [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lazy-loading 'resources' on Instance uuid 03bd4306-195f-44f8-a35c-32869baff416 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1602.856087] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9f4635-3b12-4485-9671-254ace535273 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.863410] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d2e033-c1a1-4c15-b649-561cb3f71830 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.893349] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f252367b-d394-4e72-be65-882500054ee4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.899819] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-28ac9605-5663-44ae-9ff4-7652e87884df {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.912123] env[61978]: DEBUG nova.compute.provider_tree [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1602.956184] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c0c3c94-339c-4dbc-a35e-93ad07b3087e tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "d542b9ef-a4d8-4dad-8b97-b9e67372e214" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.956454] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c0c3c94-339c-4dbc-a35e-93ad07b3087e tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "d542b9ef-a4d8-4dad-8b97-b9e67372e214" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.956686] env[61978]: INFO nova.compute.manager [None req-6c0c3c94-339c-4dbc-a35e-93ad07b3087e tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Attaching volume d9eba18f-07e9-429e-9ea8-7f5f863eca63 to /dev/sdb [ 1602.985632] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fad4b86-c6e1-4e7c-8806-181465a1cead {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.992571] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11efd03-75c3-4589-b0bf-155a835172b6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.005040] env[61978]: DEBUG nova.virt.block_device [None req-6c0c3c94-339c-4dbc-a35e-93ad07b3087e tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Updating existing volume attachment record: 5b8601fc-02d2-46cd-a115-7dad572a1d82 {{(pid=61978) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1603.415608] env[61978]: DEBUG nova.scheduler.client.report [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1603.921335] env[61978]: DEBUG oslo_concurrency.lockutils [None 
req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.629s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.942347] env[61978]: INFO nova.scheduler.client.report [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Deleted allocations for instance 03bd4306-195f-44f8-a35c-32869baff416 [ 1604.449999] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9b9f9832-070e-475c-b3cd-5106ab85d88a tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "03bd4306-195f-44f8-a35c-32869baff416" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.512s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1605.797155] env[61978]: DEBUG oslo_concurrency.lockutils [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "733c2f53-04d3-4a8b-a7c1-5194d7961a31" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.797608] env[61978]: DEBUG oslo_concurrency.lockutils [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "733c2f53-04d3-4a8b-a7c1-5194d7961a31" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1605.797675] env[61978]: DEBUG oslo_concurrency.lockutils [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "733c2f53-04d3-4a8b-a7c1-5194d7961a31-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.797868] env[61978]: DEBUG oslo_concurrency.lockutils [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "733c2f53-04d3-4a8b-a7c1-5194d7961a31-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1605.798065] env[61978]: DEBUG oslo_concurrency.lockutils [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "733c2f53-04d3-4a8b-a7c1-5194d7961a31-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1605.800390] env[61978]: INFO nova.compute.manager [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] 
Terminating instance [ 1605.802228] env[61978]: DEBUG nova.compute.manager [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Start destroying the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1605.802439] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1605.803288] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87988762-5d49-4a87-ad02-e5dfe5cb5635 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.810869] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1605.811114] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f67817a-7b53-4f09-b5a3-48d87c11aebb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.817174] env[61978]: DEBUG oslo_vmware.api [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1605.817174] env[61978]: value = "task-1396262" [ 1605.817174] env[61978]: _type = "Task" [ 1605.817174] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.826109] env[61978]: DEBUG oslo_vmware.api [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396262, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.327325] env[61978]: DEBUG oslo_vmware.api [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396262, 'name': PowerOffVM_Task, 'duration_secs': 0.19771} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.327994] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1606.327994] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1606.328171] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a2ec190-2b75-4083-83ff-1ac50e2432d3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.389473] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1606.389707] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1606.389899] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Deleting the datastore file [datastore2] 733c2f53-04d3-4a8b-a7c1-5194d7961a31 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1606.390304] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7c05fb6f-10cd-4e26-9c34-6f87bb590d07 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.397377] env[61978]: DEBUG oslo_vmware.api [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for the task: (returnval){ [ 1606.397377] env[61978]: value = "task-1396264" [ 1606.397377] env[61978]: _type = "Task" [ 1606.397377] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.404518] env[61978]: DEBUG oslo_vmware.api [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396264, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.907598] env[61978]: DEBUG oslo_vmware.api [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Task: {'id': task-1396264, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156809} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.907949] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1606.908049] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1606.908296] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1606.908524] env[61978]: INFO nova.compute.manager [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1606.908778] env[61978]: DEBUG oslo.service.loopingcall [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1606.908978] env[61978]: DEBUG nova.compute.manager [-] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1606.909108] env[61978]: DEBUG nova.network.neutron [-] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1607.178558] env[61978]: DEBUG nova.compute.manager [req-31e76804-e4e1-4c1d-a528-15b6d72a2fa6 req-a238f0e5-051e-4633-9fb8-4436b28efe00 service nova] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Received event network-vif-deleted-850a6613-240f-4bb6-a3bd-cd95cd2ebe18 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1607.179072] env[61978]: INFO nova.compute.manager [req-31e76804-e4e1-4c1d-a528-15b6d72a2fa6 req-a238f0e5-051e-4633-9fb8-4436b28efe00 service nova] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Neutron deleted interface 850a6613-240f-4bb6-a3bd-cd95cd2ebe18; detaching it from the instance and deleting it from the info cache [ 1607.179228] env[61978]: DEBUG nova.network.neutron [req-31e76804-e4e1-4c1d-a528-15b6d72a2fa6 req-a238f0e5-051e-4633-9fb8-4436b28efe00 service nova] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1607.546705] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c0c3c94-339c-4dbc-a35e-93ad07b3087e tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: 
d542b9ef-a4d8-4dad-8b97-b9e67372e214] Volume attach. Driver type: vmdk {{(pid=61978) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1607.546963] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c0c3c94-339c-4dbc-a35e-93ad07b3087e tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296093', 'volume_id': 'd9eba18f-07e9-429e-9ea8-7f5f863eca63', 'name': 'volume-d9eba18f-07e9-429e-9ea8-7f5f863eca63', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd542b9ef-a4d8-4dad-8b97-b9e67372e214', 'attached_at': '', 'detached_at': '', 'volume_id': 'd9eba18f-07e9-429e-9ea8-7f5f863eca63', 'serial': 'd9eba18f-07e9-429e-9ea8-7f5f863eca63'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1607.547895] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3172fb1-9d1c-4297-ac14-242fa7dcc5e7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.564274] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1078a528-348b-4aeb-8be7-236efe20efda {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.587623] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c0c3c94-339c-4dbc-a35e-93ad07b3087e tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] volume-d9eba18f-07e9-429e-9ea8-7f5f863eca63/volume-d9eba18f-07e9-429e-9ea8-7f5f863eca63.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1607.587878] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d90a2cbf-ae7b-4eb0-b0fc-2bfe5c3cd314 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.606703] env[61978]: DEBUG oslo_vmware.api [None req-6c0c3c94-339c-4dbc-a35e-93ad07b3087e tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1607.606703] env[61978]: value = "task-1396265" [ 1607.606703] env[61978]: _type = "Task" [ 1607.606703] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.615537] env[61978]: DEBUG oslo_vmware.api [None req-6c0c3c94-339c-4dbc-a35e-93ad07b3087e tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396265, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.641087] env[61978]: DEBUG nova.network.neutron [-] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1607.681864] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fc7c0069-ab2e-46bd-87c1-4001b6e9854a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.691501] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8593bb-858a-4ed6-9a06-3a44870dd253 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.715798] env[61978]: DEBUG nova.compute.manager [req-31e76804-e4e1-4c1d-a528-15b6d72a2fa6 req-a238f0e5-051e-4633-9fb8-4436b28efe00 service nova] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Detach interface failed, port_id=850a6613-240f-4bb6-a3bd-cd95cd2ebe18, reason: Instance 733c2f53-04d3-4a8b-a7c1-5194d7961a31 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1608.118320] env[61978]: DEBUG oslo_vmware.api [None req-6c0c3c94-339c-4dbc-a35e-93ad07b3087e tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396265, 'name': ReconfigVM_Task, 'duration_secs': 0.343912} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.118597] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c0c3c94-339c-4dbc-a35e-93ad07b3087e tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Reconfigured VM instance instance-0000007a to attach disk [datastore2] volume-d9eba18f-07e9-429e-9ea8-7f5f863eca63/volume-d9eba18f-07e9-429e-9ea8-7f5f863eca63.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1608.123064] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f348bad2-53ca-4a05-a431-64c1558e8052 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.137315] env[61978]: DEBUG oslo_vmware.api [None req-6c0c3c94-339c-4dbc-a35e-93ad07b3087e tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1608.137315] env[61978]: value = "task-1396266" [ 1608.137315] env[61978]: _type = "Task" [ 1608.137315] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.145713] env[61978]: INFO nova.compute.manager [-] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Took 1.24 seconds to deallocate network for instance. [ 1608.146071] env[61978]: DEBUG oslo_vmware.api [None req-6c0c3c94-339c-4dbc-a35e-93ad07b3087e tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396266, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.647122] env[61978]: DEBUG oslo_vmware.api [None req-6c0c3c94-339c-4dbc-a35e-93ad07b3087e tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396266, 'name': ReconfigVM_Task, 'duration_secs': 0.1252} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.647432] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c0c3c94-339c-4dbc-a35e-93ad07b3087e tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296093', 'volume_id': 'd9eba18f-07e9-429e-9ea8-7f5f863eca63', 'name': 'volume-d9eba18f-07e9-429e-9ea8-7f5f863eca63', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd542b9ef-a4d8-4dad-8b97-b9e67372e214', 'attached_at': '', 'detached_at': '', 'volume_id': 'd9eba18f-07e9-429e-9ea8-7f5f863eca63', 'serial': 'd9eba18f-07e9-429e-9ea8-7f5f863eca63'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1608.652015] env[61978]: DEBUG oslo_concurrency.lockutils [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.652278] env[61978]: DEBUG oslo_concurrency.lockutils [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1608.652510] env[61978]: DEBUG nova.objects.instance [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lazy-loading 'resources' on Instance uuid 733c2f53-04d3-4a8b-a7c1-5194d7961a31 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1609.206331] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d54817-a2b6-463e-9fbf-92d7d2aea74a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.213793] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb53474-7d3f-4195-b230-20d6602c05c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.243667] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efaad80a-3589-423c-97b6-369c8469414d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.250644] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a57c0918-e3cc-4fac-9aee-6ce6d970e226 {{(pid=61978) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.263324] env[61978]: DEBUG nova.compute.provider_tree [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1609.685620] env[61978]: DEBUG nova.objects.instance [None req-6c0c3c94-339c-4dbc-a35e-93ad07b3087e tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lazy-loading 'flavor' on Instance uuid d542b9ef-a4d8-4dad-8b97-b9e67372e214 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1609.767030] env[61978]: DEBUG nova.scheduler.client.report [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1610.191784] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6c0c3c94-339c-4dbc-a35e-93ad07b3087e tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "d542b9ef-a4d8-4dad-8b97-b9e67372e214" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.235s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1610.270818] env[61978]: DEBUG oslo_concurrency.lockutils [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.618s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1610.291736] env[61978]: INFO nova.scheduler.client.report [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Deleted allocations for instance 733c2f53-04d3-4a8b-a7c1-5194d7961a31 [ 1610.406154] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5f4e24f1-bf2d-46d6-baa3-d855b811a795 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "d542b9ef-a4d8-4dad-8b97-b9e67372e214" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1610.406419] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5f4e24f1-bf2d-46d6-baa3-d855b811a795 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "d542b9ef-a4d8-4dad-8b97-b9e67372e214" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1610.799236] env[61978]: DEBUG oslo_concurrency.lockutils [None req-159d8382-37d2-4a92-954d-e30d5a2449ce tempest-ServersTestJSON-1559277588 tempest-ServersTestJSON-1559277588-project-member] Lock "733c2f53-04d3-4a8b-a7c1-5194d7961a31" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.002s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1610.910276] env[61978]: INFO nova.compute.manager [None req-5f4e24f1-bf2d-46d6-baa3-d855b811a795 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Detaching volume d9eba18f-07e9-429e-9ea8-7f5f863eca63 [ 1611.077301] env[61978]: INFO nova.virt.block_device [None req-5f4e24f1-bf2d-46d6-baa3-d855b811a795 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Attempting to driver detach volume d9eba18f-07e9-429e-9ea8-7f5f863eca63 from mountpoint /dev/sdb [ 1611.077549] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f4e24f1-bf2d-46d6-baa3-d855b811a795 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Volume detach. Driver type: vmdk {{(pid=61978) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1611.077741] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f4e24f1-bf2d-46d6-baa3-d855b811a795 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296093', 'volume_id': 'd9eba18f-07e9-429e-9ea8-7f5f863eca63', 'name': 'volume-d9eba18f-07e9-429e-9ea8-7f5f863eca63', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd542b9ef-a4d8-4dad-8b97-b9e67372e214', 'attached_at': '', 'detached_at': '', 'volume_id': 'd9eba18f-07e9-429e-9ea8-7f5f863eca63', 'serial': 'd9eba18f-07e9-429e-9ea8-7f5f863eca63'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1611.078643] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c7875c7-41bc-4e9c-aabb-22600eb6251d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.099090] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb424b9b-43e4-474d-95da-d9fa30096e46 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.105451] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e55cdb9-a0f9-4434-9907-3b621b804572 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.124168] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf823dc-0a52-494f-8ad9-6549c3344299 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.139103] env[61978]: 
DEBUG nova.virt.vmwareapi.volumeops [None req-5f4e24f1-bf2d-46d6-baa3-d855b811a795 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] The volume has not been displaced from its original location: [datastore2] volume-d9eba18f-07e9-429e-9ea8-7f5f863eca63/volume-d9eba18f-07e9-429e-9ea8-7f5f863eca63.vmdk. No consolidation needed. {{(pid=61978) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1611.144176] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f4e24f1-bf2d-46d6-baa3-d855b811a795 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Reconfiguring VM instance instance-0000007a to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1611.144459] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f88ea1dc-893c-42e0-b1c5-7d1618395a89 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.161778] env[61978]: DEBUG oslo_vmware.api [None req-5f4e24f1-bf2d-46d6-baa3-d855b811a795 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1611.161778] env[61978]: value = "task-1396267" [ 1611.161778] env[61978]: _type = "Task" [ 1611.161778] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.169906] env[61978]: DEBUG oslo_vmware.api [None req-5f4e24f1-bf2d-46d6-baa3-d855b811a795 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396267, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.671760] env[61978]: DEBUG oslo_vmware.api [None req-5f4e24f1-bf2d-46d6-baa3-d855b811a795 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396267, 'name': ReconfigVM_Task, 'duration_secs': 0.214144} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.672102] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f4e24f1-bf2d-46d6-baa3-d855b811a795 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Reconfigured VM instance instance-0000007a to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1611.676497] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad1bb0cd-b7a0-4717-8d03-556f0f0fa8cc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.691514] env[61978]: DEBUG oslo_vmware.api [None req-5f4e24f1-bf2d-46d6-baa3-d855b811a795 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1611.691514] env[61978]: value = "task-1396268" [ 1611.691514] env[61978]: _type = "Task" [ 1611.691514] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.699249] env[61978]: DEBUG oslo_vmware.api [None req-5f4e24f1-bf2d-46d6-baa3-d855b811a795 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396268, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.202067] env[61978]: DEBUG oslo_vmware.api [None req-5f4e24f1-bf2d-46d6-baa3-d855b811a795 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396268, 'name': ReconfigVM_Task, 'duration_secs': 0.238305} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.202382] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f4e24f1-bf2d-46d6-baa3-d855b811a795 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296093', 'volume_id': 'd9eba18f-07e9-429e-9ea8-7f5f863eca63', 'name': 'volume-d9eba18f-07e9-429e-9ea8-7f5f863eca63', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd542b9ef-a4d8-4dad-8b97-b9e67372e214', 'attached_at': '', 'detached_at': '', 'volume_id': 'd9eba18f-07e9-429e-9ea8-7f5f863eca63', 'serial': 'd9eba18f-07e9-429e-9ea8-7f5f863eca63'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1612.743613] env[61978]: DEBUG nova.objects.instance [None req-5f4e24f1-bf2d-46d6-baa3-d855b811a795 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lazy-loading 'flavor' on Instance uuid d542b9ef-a4d8-4dad-8b97-b9e67372e214 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1613.751761] env[61978]: DEBUG oslo_concurrency.lockutils [None req-5f4e24f1-bf2d-46d6-baa3-d855b811a795 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "d542b9ef-a4d8-4dad-8b97-b9e67372e214" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.345s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.665564] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "d542b9ef-a4d8-4dad-8b97-b9e67372e214" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1614.665823] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "d542b9ef-a4d8-4dad-8b97-b9e67372e214" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1614.666052] env[61978]: DEBUG oslo_concurrency.lockutils [None 
req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "d542b9ef-a4d8-4dad-8b97-b9e67372e214-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1614.666246] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "d542b9ef-a4d8-4dad-8b97-b9e67372e214-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1614.666417] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "d542b9ef-a4d8-4dad-8b97-b9e67372e214-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.668385] env[61978]: INFO nova.compute.manager [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Terminating instance [ 1614.670412] env[61978]: DEBUG nova.compute.manager [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1614.670615] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1614.671451] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55f8727-9ed9-4cb3-a3a0-5f55365d46ef {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.679008] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1614.679269] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-331637ff-30de-478e-860c-6b4b78e5357b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.685324] env[61978]: DEBUG oslo_vmware.api [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1614.685324] env[61978]: value = "task-1396269" [ 1614.685324] env[61978]: _type = "Task" [ 1614.685324] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.692633] env[61978]: DEBUG oslo_vmware.api [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396269, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.140533] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.140872] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.141016] env[61978]: INFO nova.compute.manager [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Shelving [ 1615.194446] env[61978]: DEBUG oslo_vmware.api [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396269, 'name': PowerOffVM_Task, 'duration_secs': 0.163074} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.194765] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1615.194992] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1615.195316] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe98a944-c391-46b3-bd0f-04b40b56ab99 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.253680] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1615.253934] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1615.254131] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 
tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Deleting the datastore file [datastore2] d542b9ef-a4d8-4dad-8b97-b9e67372e214 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1615.254399] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19065aa6-a206-46c3-82c7-97802bdd7526 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.260413] env[61978]: DEBUG oslo_vmware.api [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1615.260413] env[61978]: value = "task-1396271" [ 1615.260413] env[61978]: _type = "Task" [ 1615.260413] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.267813] env[61978]: DEBUG oslo_vmware.api [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396271, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.648703] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1615.649103] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e1e76bf-e871-4de3-8cdd-427c21ace374 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.656162] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1615.656162] env[61978]: value = "task-1396272" [ 1615.656162] env[61978]: _type = "Task" [ 1615.656162] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.665641] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396272, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.770185] env[61978]: DEBUG oslo_vmware.api [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396271, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139704} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.770413] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1615.770616] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1615.770803] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1615.770984] env[61978]: INFO nova.compute.manager [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1615.771244] env[61978]: DEBUG oslo.service.loopingcall [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1615.771453] env[61978]: DEBUG nova.compute.manager [-] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1615.771557] env[61978]: DEBUG nova.network.neutron [-] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1616.166249] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396272, 'name': PowerOffVM_Task, 'duration_secs': 0.222954} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.166623] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1616.167313] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3adda409-064d-4977-8b77-a5b882cafcaa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.186778] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46bc86e4-9d97-41cf-8180-9298fa5398f9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.223309] env[61978]: DEBUG nova.compute.manager [req-ef85fcbc-79dd-478a-96b2-7f5368cfc696 req-30880324-2239-47e0-8e8b-dbd539dfad3a service nova] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Received event network-vif-deleted-bcdfd00c-adfd-4464-8273-d5ef57460a54 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1616.223546] env[61978]: INFO nova.compute.manager [req-ef85fcbc-79dd-478a-96b2-7f5368cfc696 req-30880324-2239-47e0-8e8b-dbd539dfad3a service nova] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Neutron deleted interface bcdfd00c-adfd-4464-8273-d5ef57460a54; detaching it from the instance and deleting it from the info cache [ 1616.223779] env[61978]: DEBUG nova.network.neutron [req-ef85fcbc-79dd-478a-96b2-7f5368cfc696 req-30880324-2239-47e0-8e8b-dbd539dfad3a service nova] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1616.698618] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Creating Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1616.698944] env[61978]: DEBUG nova.network.neutron [-] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1616.700163] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f1cd2325-90a1-453b-a831-d01c88009662 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.710293] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1616.710293] env[61978]: value = "task-1396273" [ 1616.710293] env[61978]: _type = "Task" [ 1616.710293] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.720494] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396273, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.727288] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bcbc1dde-dafb-427f-9f9e-3e9a17bb03d2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.735831] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aeea69b-71d9-4f56-a9aa-096819ea85c4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.761517] env[61978]: DEBUG nova.compute.manager [req-ef85fcbc-79dd-478a-96b2-7f5368cfc696 req-30880324-2239-47e0-8e8b-dbd539dfad3a service nova] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Detach interface failed, port_id=bcdfd00c-adfd-4464-8273-d5ef57460a54, reason: Instance d542b9ef-a4d8-4dad-8b97-b9e67372e214 could not be found. {{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1617.204136] env[61978]: INFO nova.compute.manager [-] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Took 1.43 seconds to deallocate network for instance. [ 1617.219632] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396273, 'name': CreateSnapshot_Task, 'duration_secs': 0.380696} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.219886] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Created Snapshot of the VM instance {{(pid=61978) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1617.220630] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e7f6550-f3d5-4e3b-85b4-4b4e1e046bb9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.710450] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1617.710723] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.710950] env[61978]: DEBUG nova.objects.instance [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lazy-loading 'resources' on Instance uuid d542b9ef-a4d8-4dad-8b97-b9e67372e214 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1617.737125] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Creating linked-clone VM from snapshot {{(pid=61978) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1617.737615] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-65895426-907f-46bf-965d-e9332dedc14f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.745692] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1617.745692] env[61978]: value = "task-1396274" [ 1617.745692] env[61978]: _type = "Task" [ 1617.745692] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.753241] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396274, 'name': CloneVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.258410] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396274, 'name': CloneVM_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.263083] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d9b7e17-0b5d-42b3-a4ff-eaf8b684332d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.269650] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527e67a5-d42a-4725-b31f-97ef9ace0e03 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.301626] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fee5929-6435-4c8e-b3ab-cc794702bbd0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.309202] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9e0f315-a7e6-4ff0-a93b-42364055596d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.322316] env[61978]: DEBUG nova.compute.provider_tree [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1618.756310] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396274, 'name': CloneVM_Task, 'duration_secs': 0.961095} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.756572] env[61978]: INFO nova.virt.vmwareapi.vmops [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Created linked-clone VM from snapshot [ 1618.757284] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-057c054f-df1e-46fd-80de-14593395c697 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.763888] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Uploading image 0694b3db-45b7-4f54-933b-d14fc9f9453d {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1618.788331] env[61978]: DEBUG oslo_vmware.rw_handles [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1618.788331] env[61978]: value = "vm-296095" [ 1618.788331] env[61978]: _type = "VirtualMachine" [ 1618.788331] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1618.788579] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-23f1ba2d-d5c3-4ddb-86d0-062745ec6605 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.795435] env[61978]: DEBUG oslo_vmware.rw_handles [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lease: (returnval){ [ 1618.795435] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]522ec950-27c7-fb91-f799-b48183a0271c" [ 1618.795435] env[61978]: _type = "HttpNfcLease" [ 1618.795435] env[61978]: } obtained for exporting VM: (result){ [ 1618.795435] env[61978]: value = "vm-296095" [ 1618.795435] env[61978]: _type = "VirtualMachine" [ 1618.795435] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1618.795690] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the lease: (returnval){ [ 1618.795690] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]522ec950-27c7-fb91-f799-b48183a0271c" [ 1618.795690] env[61978]: _type = "HttpNfcLease" [ 1618.795690] env[61978]: } to be ready. {{(pid=61978) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1618.801699] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1618.801699] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]522ec950-27c7-fb91-f799-b48183a0271c" [ 1618.801699] env[61978]: _type = "HttpNfcLease" [ 1618.801699] env[61978]: } is initializing. 
{{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1618.825676] env[61978]: DEBUG nova.scheduler.client.report [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1619.304669] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1619.304669] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]522ec950-27c7-fb91-f799-b48183a0271c" [ 1619.304669] env[61978]: _type = "HttpNfcLease" [ 1619.304669] env[61978]: } is ready. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1619.305136] env[61978]: DEBUG oslo_vmware.rw_handles [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1619.305136] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]522ec950-27c7-fb91-f799-b48183a0271c" [ 1619.305136] env[61978]: _type = "HttpNfcLease" [ 1619.305136] env[61978]: }. {{(pid=61978) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1619.305690] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd0cd93-0412-4780-802c-843d682bd6bd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.312777] env[61978]: DEBUG oslo_vmware.rw_handles [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52539e53-9bec-4137-fdd2-065b777d4ab2/disk-0.vmdk from lease info. {{(pid=61978) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1619.312958] env[61978]: DEBUG oslo_vmware.rw_handles [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52539e53-9bec-4137-fdd2-065b777d4ab2/disk-0.vmdk for reading. 
{{(pid=61978) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1619.369371] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.658s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.389304] env[61978]: INFO nova.scheduler.client.report [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Deleted allocations for instance d542b9ef-a4d8-4dad-8b97-b9e67372e214 [ 1619.400460] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-45760b40-bd4e-4ce4-9b1e-e642d90b5335 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.896479] env[61978]: DEBUG oslo_concurrency.lockutils [None req-d9e2d18d-5239-4d8c-babe-a3b19abfc8f4 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "d542b9ef-a4d8-4dad-8b97-b9e67372e214" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.231s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1621.575560] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1621.575932] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.079327] env[61978]: DEBUG nova.compute.manager [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Starting instance... 
{{(pid=61978) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1622.600338] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1622.600612] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.602131] env[61978]: INFO nova.compute.claims [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1623.649178] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-264f148d-a305-4e46-8a8e-675c913e68b8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.656590] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3637468-35d4-4a13-906b-b092b1f96a99 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.686625] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16451e50-9911-46b9-8203-8ef4064838a3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.693937] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa2fe3a-5a56-4b4a-8017-e34ffb27a610 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.706703] env[61978]: DEBUG nova.compute.provider_tree [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1624.210326] env[61978]: DEBUG nova.scheduler.client.report [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1624.715222] env[61978]: DEBUG 
oslo_concurrency.lockutils [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.114s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.715779] env[61978]: DEBUG nova.compute.manager [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Start building networks asynchronously for instance. {{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1625.222647] env[61978]: DEBUG nova.compute.utils [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1625.224363] env[61978]: DEBUG nova.compute.manager [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Allocating IP information in the background. {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1625.224363] env[61978]: DEBUG nova.network.neutron [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] allocate_for_instance() {{(pid=61978) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1625.333302] env[61978]: DEBUG nova.policy [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd72a836e3aef4b59b1092b91f33fd929', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2b289cdad1fe4ad38c5d987680be2367', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61978) authorize /opt/stack/nova/nova/policy.py:201}} [ 1625.603191] env[61978]: DEBUG nova.network.neutron [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Successfully created port: aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d {{(pid=61978) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1625.727960] env[61978]: DEBUG nova.compute.manager [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Start building block device mappings for instance. 
{{(pid=61978) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1626.736923] env[61978]: DEBUG nova.compute.manager [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Start spawning the instance on the hypervisor. {{(pid=61978) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1626.764567] env[61978]: DEBUG nova.virt.hardware [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-04T14:55:46Z,direct_url=,disk_format='vmdk',id=4732143d-796a-4a66-9f1e-806f8b0654e0,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3da19a374c3148e4b31cec361b3dbeed',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-04T14:55:47Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1626.764861] env[61978]: DEBUG nova.virt.hardware [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1626.765038] env[61978]: DEBUG nova.virt.hardware [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1626.765242] env[61978]: DEBUG nova.virt.hardware [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1626.765398] env[61978]: DEBUG nova.virt.hardware [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1626.765552] env[61978]: DEBUG nova.virt.hardware [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1626.765765] env[61978]: DEBUG nova.virt.hardware [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1626.765930] env[61978]: DEBUG nova.virt.hardware [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1626.766117] env[61978]: DEBUG nova.virt.hardware [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1626.766293] env[61978]: DEBUG nova.virt.hardware [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1626.766509] env[61978]: DEBUG nova.virt.hardware [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1626.767455] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f2e329-2867-4b74-a0b8-dd21dbc5d15d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.775401] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7cc3a75-7570-4f0c-8f88-845cdb419cdc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.982388] env[61978]: DEBUG nova.compute.manager [req-51876daf-28b9-425f-890b-6d5be462cdb3 req-431610c3-d587-43b9-b7cc-c153baabf81f service nova] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Received event network-vif-plugged-aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1626.982623] env[61978]: DEBUG oslo_concurrency.lockutils [req-51876daf-28b9-425f-890b-6d5be462cdb3 req-431610c3-d587-43b9-b7cc-c153baabf81f service nova] Acquiring lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.982835] env[61978]: DEBUG oslo_concurrency.lockutils [req-51876daf-28b9-425f-890b-6d5be462cdb3 req-431610c3-d587-43b9-b7cc-c153baabf81f service nova] Lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.983068] env[61978]: DEBUG oslo_concurrency.lockutils [req-51876daf-28b9-425f-890b-6d5be462cdb3 req-431610c3-d587-43b9-b7cc-c153baabf81f service nova] Lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.983199] 
env[61978]: DEBUG nova.compute.manager [req-51876daf-28b9-425f-890b-6d5be462cdb3 req-431610c3-d587-43b9-b7cc-c153baabf81f service nova] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] No waiting events found dispatching network-vif-plugged-aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1626.983389] env[61978]: WARNING nova.compute.manager [req-51876daf-28b9-425f-890b-6d5be462cdb3 req-431610c3-d587-43b9-b7cc-c153baabf81f service nova] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Received unexpected event network-vif-plugged-aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d for instance with vm_state building and task_state spawning. [ 1627.480073] env[61978]: DEBUG oslo_vmware.rw_handles [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52539e53-9bec-4137-fdd2-065b777d4ab2/disk-0.vmdk. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1627.480737] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403a6d1a-bdbe-421e-9c2f-0b30e069aca6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.487480] env[61978]: DEBUG oslo_vmware.rw_handles [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52539e53-9bec-4137-fdd2-065b777d4ab2/disk-0.vmdk is in state: ready. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1627.487656] env[61978]: ERROR oslo_vmware.rw_handles [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52539e53-9bec-4137-fdd2-065b777d4ab2/disk-0.vmdk due to incomplete transfer. [ 1627.487885] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-8371a87c-2e5c-41dd-a574-e91efb4d0c70 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.494646] env[61978]: DEBUG oslo_vmware.rw_handles [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52539e53-9bec-4137-fdd2-065b777d4ab2/disk-0.vmdk. 
{{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1627.494850] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Uploaded image 0694b3db-45b7-4f54-933b-d14fc9f9453d to the Glance image server {{(pid=61978) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1627.497135] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Destroying the VM {{(pid=61978) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1627.497703] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d9573831-e5ae-48dd-8e52-ebffd1955adf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.503702] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1627.503702] env[61978]: value = "task-1396276" [ 1627.503702] env[61978]: _type = "Task" [ 1627.503702] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.511312] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396276, 'name': Destroy_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.542131] env[61978]: DEBUG nova.network.neutron [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Successfully updated port: aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d {{(pid=61978) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1627.565176] env[61978]: DEBUG nova.compute.manager [req-7c1a2523-a537-4fb7-8824-2567d7c024f0 req-3423b407-1f0c-4dff-ad85-a7dfc685a2c6 service nova] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Received event network-changed-aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1627.565387] env[61978]: DEBUG nova.compute.manager [req-7c1a2523-a537-4fb7-8824-2567d7c024f0 req-3423b407-1f0c-4dff-ad85-a7dfc685a2c6 service nova] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Refreshing instance network info cache due to event network-changed-aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1627.565618] env[61978]: DEBUG oslo_concurrency.lockutils [req-7c1a2523-a537-4fb7-8824-2567d7c024f0 req-3423b407-1f0c-4dff-ad85-a7dfc685a2c6 service nova] Acquiring lock "refresh_cache-a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1627.566272] env[61978]: DEBUG oslo_concurrency.lockutils [req-7c1a2523-a537-4fb7-8824-2567d7c024f0 req-3423b407-1f0c-4dff-ad85-a7dfc685a2c6 service nova] Acquired lock "refresh_cache-a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1627.566272] env[61978]: DEBUG nova.network.neutron [req-7c1a2523-a537-4fb7-8824-2567d7c024f0 req-3423b407-1f0c-4dff-ad85-a7dfc685a2c6 service nova] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Refreshing network info cache for port aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1628.014926] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396276, 'name': Destroy_Task, 'duration_secs': 0.313537} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.015315] env[61978]: INFO nova.virt.vmwareapi.vm_util [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Destroyed the VM [ 1628.015493] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Deleting Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1628.015743] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-231a94f1-09bf-4021-b019-d3238ce6b711 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.022374] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1628.022374] env[61978]: value = "task-1396277" [ 1628.022374] env[61978]: _type = "Task" [ 1628.022374] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.029647] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396277, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.045220] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "refresh_cache-a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1628.098616] env[61978]: DEBUG nova.network.neutron [req-7c1a2523-a537-4fb7-8824-2567d7c024f0 req-3423b407-1f0c-4dff-ad85-a7dfc685a2c6 service nova] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Instance cache missing network info. {{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1628.167094] env[61978]: DEBUG nova.network.neutron [req-7c1a2523-a537-4fb7-8824-2567d7c024f0 req-3423b407-1f0c-4dff-ad85-a7dfc685a2c6 service nova] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1628.532294] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396277, 'name': RemoveSnapshot_Task, 'duration_secs': 0.342371} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.532572] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Deleted Snapshot of the VM instance {{(pid=61978) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1628.532854] env[61978]: DEBUG nova.compute.manager [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1628.533623] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8971b0-f631-4182-b684-929b7a946a33 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.671056] env[61978]: DEBUG oslo_concurrency.lockutils [req-7c1a2523-a537-4fb7-8824-2567d7c024f0 req-3423b407-1f0c-4dff-ad85-a7dfc685a2c6 service nova] Releasing lock "refresh_cache-a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1628.671056] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquired lock "refresh_cache-a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1628.671056] env[61978]: DEBUG nova.network.neutron [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Building network 
info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1629.045837] env[61978]: INFO nova.compute.manager [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Shelve offloading [ 1629.047498] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1629.047754] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-358b889a-8b82-4aac-94fb-6f98c19b98e4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.054400] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1629.054400] env[61978]: value = "task-1396278" [ 1629.054400] env[61978]: _type = "Task" [ 1629.054400] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.062021] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396278, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.201877] env[61978]: DEBUG nova.network.neutron [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Instance cache missing network info. 
{{(pid=61978) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1629.492476] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.492694] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.492952] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1629.507158] env[61978]: DEBUG nova.network.neutron [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Updating instance_info_cache with network_info: [{"id": "aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d", "address": "fa:16:3e:88:c7:c5", "network": {"id": "b65f6c14-2588-4d96-a954-d45b7ede9d35", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-701006356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b289cdad1fe4ad38c5d987680be2367", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa9e6b39-2f", "ovs_interfaceid": "aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.564041] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] VM already powered off {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1629.564204] env[61978]: DEBUG nova.compute.manager [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1629.564936] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-582d4aa9-76f0-43ab-91b4-7a82822238e0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.570302] env[61978]: DEBUG 
oslo_concurrency.lockutils [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1629.570469] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquired lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1629.570641] env[61978]: DEBUG nova.network.neutron [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1629.995568] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1629.995827] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.995965] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.996166] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.996278] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.996419] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.996567] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.996699] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1629.996842] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1630.009076] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Releasing lock "refresh_cache-a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1630.009407] env[61978]: DEBUG nova.compute.manager [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Instance network_info: |[{"id": "aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d", "address": "fa:16:3e:88:c7:c5", "network": {"id": "b65f6c14-2588-4d96-a954-d45b7ede9d35", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-701006356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b289cdad1fe4ad38c5d987680be2367", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa9e6b39-2f", "ovs_interfaceid": "aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61978) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1630.010092] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:c7:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '67921bdb-a7a0-46b5-ba05-ca997496e222', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1630.017271] env[61978]: DEBUG oslo.service.loopingcall [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1630.017716] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1630.017948] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4bec0cb4-ec9f-4c2d-881a-30f66b461300 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.037799] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1630.037799] env[61978]: value = "task-1396279" [ 1630.037799] env[61978]: _type = "Task" [ 1630.037799] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.046027] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396279, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.290611] env[61978]: DEBUG nova.network.neutron [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Updating instance_info_cache with network_info: [{"id": "1053461c-995c-4bdc-a58c-52c4b5d4d8a5", "address": "fa:16:3e:ae:0d:68", "network": {"id": "1200e62b-3b3a-40e1-98a2-c33a17573a38", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2039134840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8f40d19e7c74ade886c322a78583545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1053461c-99", "ovs_interfaceid": "1053461c-995c-4bdc-a58c-52c4b5d4d8a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1630.499688] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.499956] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1630.500181] env[61978]: DEBUG oslo_concurrency.lockutils [None 
req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1630.500387] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1630.501417] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d8bda6-570d-4b12-805d-97a3ca87d61d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.511345] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c83666c8-b9eb-4a25-bf5f-ba0a9be1de48 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.524560] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9feee6b2-460f-4be3-81df-d58d3edef8e7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.530807] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a429ac0a-222d-4133-b3b9-ca9e667d376c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.558534] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180860MB free_disk=185GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1630.558670] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.558858] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1630.568149] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396279, 'name': CreateVM_Task, 'duration_secs': 0.295074} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.568348] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1630.569030] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1630.569245] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1630.569575] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1630.569805] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d092abb-4700-4a4b-a0c3-6c881d0dac97 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.573813] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1630.573813] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]522e9514-0b6a-f8d7-9c55-2567f0223433" [ 1630.573813] env[61978]: _type = "Task" [ 1630.573813] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.581660] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522e9514-0b6a-f8d7-9c55-2567f0223433, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.793221] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Releasing lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1631.032515] env[61978]: DEBUG nova.compute.manager [req-3e2dbda5-3c6c-47f6-bcae-f01f3327a080 req-e22799ab-b9d5-4130-8f5f-20cea7469e21 service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Received event network-vif-unplugged-1053461c-995c-4bdc-a58c-52c4b5d4d8a5 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1631.032746] env[61978]: DEBUG oslo_concurrency.lockutils [req-3e2dbda5-3c6c-47f6-bcae-f01f3327a080 req-e22799ab-b9d5-4130-8f5f-20cea7469e21 service nova] Acquiring lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1631.032963] env[61978]: DEBUG oslo_concurrency.lockutils [req-3e2dbda5-3c6c-47f6-bcae-f01f3327a080 req-e22799ab-b9d5-4130-8f5f-20cea7469e21 service nova] Lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1631.033211] env[61978]: DEBUG oslo_concurrency.lockutils [req-3e2dbda5-3c6c-47f6-bcae-f01f3327a080 req-e22799ab-b9d5-4130-8f5f-20cea7469e21 service nova] Lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.033401] env[61978]: DEBUG nova.compute.manager [req-3e2dbda5-3c6c-47f6-bcae-f01f3327a080 req-e22799ab-b9d5-4130-8f5f-20cea7469e21 service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] No waiting events found dispatching network-vif-unplugged-1053461c-995c-4bdc-a58c-52c4b5d4d8a5 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1631.033581] env[61978]: WARNING nova.compute.manager [req-3e2dbda5-3c6c-47f6-bcae-f01f3327a080 req-e22799ab-b9d5-4130-8f5f-20cea7469e21 service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Received unexpected event network-vif-unplugged-1053461c-995c-4bdc-a58c-52c4b5d4d8a5 for instance with vm_state shelved and task_state shelving_offloading. [ 1631.085477] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]522e9514-0b6a-f8d7-9c55-2567f0223433, 'name': SearchDatastore_Task, 'duration_secs': 0.008995} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.085784] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1631.085996] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Processing image 4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1631.086349] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1631.086536] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1631.086729] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1631.086995] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1844b88-5733-4ae6-aa43-7a6a952ae802 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.094810] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1631.094992] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1631.095679] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa9d937a-b7f7-4678-8a45-f21b773dccb9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.100953] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1631.100953] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52912ae7-8798-4ee4-08ea-4374af240311" [ 1631.100953] env[61978]: _type = "Task" [ 1631.100953] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.110157] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52912ae7-8798-4ee4-08ea-4374af240311, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.126588] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1631.127352] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c8e3589-8205-4bad-8a75-fb10e2a2a56a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.133531] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1631.133752] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5dc2d56b-96e4-4a83-832d-c5fb604f2a85 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.198799] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1631.199188] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1631.199398] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 
tempest-AttachVolumeShelveTestJSON-900534277-project-member] Deleting the datastore file [datastore2] a48d5ef1-b66b-429e-bbff-2351ad5eda32 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1631.199670] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b292b697-e0c4-41d0-a69e-1bd932e781b5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.205665] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1631.205665] env[61978]: value = "task-1396281" [ 1631.205665] env[61978]: _type = "Task" [ 1631.205665] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.212670] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396281, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.585203] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance a48d5ef1-b66b-429e-bbff-2351ad5eda32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1631.585374] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1631.585555] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1631.585698] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1631.610825] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52912ae7-8798-4ee4-08ea-4374af240311, 'name': SearchDatastore_Task, 'duration_secs': 0.008328} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.613809] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-668b3777-9b67-4d04-b8cb-2378563e1a6f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.618782] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1631.618782] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]5265ca4b-363f-88a4-400e-d4b2a96396ce" [ 1631.618782] env[61978]: _type = "Task" [ 1631.618782] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.624150] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a43b615-710f-4d0f-bfd8-5137261147f1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.629578] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5265ca4b-363f-88a4-400e-d4b2a96396ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.633285] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a34a76-14e7-4939-81ef-2655d1c93533 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.661724] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70b0eaa-ee24-4fc2-9748-df423fbc3aa7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.668423] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9fffa96-0f77-4a9d-b225-a3912dfa4efb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.680920] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1631.715377] env[61978]: DEBUG oslo_vmware.api [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396281, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.124019} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.715618] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1631.715803] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1631.715983] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1631.734653] env[61978]: INFO nova.scheduler.client.report [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Deleted allocations for instance a48d5ef1-b66b-429e-bbff-2351ad5eda32 [ 1632.129264] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]5265ca4b-363f-88a4-400e-d4b2a96396ce, 'name': SearchDatastore_Task, 'duration_secs': 0.00856} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.129549] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1632.129757] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e/a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1632.129982] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d0a7022-9f7a-4d22-85cd-190066c6150a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.135863] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1632.135863] env[61978]: value = "task-1396282" [ 1632.135863] env[61978]: _type = "Task" [ 1632.135863] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.143187] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396282, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.200553] env[61978]: ERROR nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [req-d216127b-0027-4a61-b972-fc8b33187ed0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 44209228-3464-48ae-bc40-83eccd44b0cf. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d216127b-0027-4a61-b972-fc8b33187ed0"}]} [ 1632.216391] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Refreshing inventories for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1632.229656] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Updating ProviderTree inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1632.229874] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1632.238601] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1632.240038] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Refreshing aggregate associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, aggregates: None {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1632.257334] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Refreshing trait associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1632.280598] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370f6c04-8817-4ef4-8023-f2525d5db967 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.288131] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf76f7d7-888c-4ccf-a3b0-35d783d1da0b {{(pid=61978) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.317500] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a23d839-45b8-4c54-a09a-6c813af5d791 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.325008] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5fb1250-936f-49f2-b3bc-1b00e157f6b2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.340636] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1632.647297] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396282, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507776} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.647573] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0/4732143d-796a-4a66-9f1e-806f8b0654e0.vmdk to [datastore2] a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e/a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1632.647801] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Extending root virtual disk to 1048576 {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1632.648116] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4bd1254a-2efa-4bee-aded-e0b4a3aceb6d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.654475] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1632.654475] env[61978]: value = "task-1396283" [ 1632.654475] env[61978]: _type = "Task" [ 1632.654475] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.661911] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396283, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.870072] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Updated inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf with generation 180 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1632.870324] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Updating resource provider 44209228-3464-48ae-bc40-83eccd44b0cf generation from 180 to 181 during operation: update_inventory {{(pid=61978) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1632.870472] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1633.056860] env[61978]: DEBUG nova.compute.manager [req-7ed588dc-c6b6-4083-81f0-b2c8e2c9ebb1 req-853e76f2-1542-44ef-8592-be5b83dab0cd service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Received event network-changed-1053461c-995c-4bdc-a58c-52c4b5d4d8a5 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1633.057082] env[61978]: DEBUG nova.compute.manager [req-7ed588dc-c6b6-4083-81f0-b2c8e2c9ebb1 req-853e76f2-1542-44ef-8592-be5b83dab0cd service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Refreshing instance network info cache due to event network-changed-1053461c-995c-4bdc-a58c-52c4b5d4d8a5. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1633.057360] env[61978]: DEBUG oslo_concurrency.lockutils [req-7ed588dc-c6b6-4083-81f0-b2c8e2c9ebb1 req-853e76f2-1542-44ef-8592-be5b83dab0cd service nova] Acquiring lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1633.057529] env[61978]: DEBUG oslo_concurrency.lockutils [req-7ed588dc-c6b6-4083-81f0-b2c8e2c9ebb1 req-853e76f2-1542-44ef-8592-be5b83dab0cd service nova] Acquired lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1633.057731] env[61978]: DEBUG nova.network.neutron [req-7ed588dc-c6b6-4083-81f0-b2c8e2c9ebb1 req-853e76f2-1542-44ef-8592-be5b83dab0cd service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Refreshing network info cache for port 1053461c-995c-4bdc-a58c-52c4b5d4d8a5 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1633.164185] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396283, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07199} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.164527] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Extended root virtual disk {{(pid=61978) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1633.167016] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a82ca5-84d6-4362-9a25-4b62dbb95a69 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.191454] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e/a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1633.192373] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a8cd8d8-4e44-425c-b9db-df009cc35717 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.213680] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1633.213680] env[61978]: value = "task-1396284" [ 1633.213680] env[61978]: _type = "Task" [ 1633.213680] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.221271] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396284, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.375372] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1633.375545] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.817s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.375810] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.137s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.376069] env[61978]: DEBUG nova.objects.instance [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lazy-loading 'resources' on Instance uuid a48d5ef1-b66b-429e-bbff-2351ad5eda32 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1633.726218] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396284, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.786620] env[61978]: DEBUG nova.network.neutron [req-7ed588dc-c6b6-4083-81f0-b2c8e2c9ebb1 req-853e76f2-1542-44ef-8592-be5b83dab0cd service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Updated VIF entry in instance network info cache for port 1053461c-995c-4bdc-a58c-52c4b5d4d8a5. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1633.787030] env[61978]: DEBUG nova.network.neutron [req-7ed588dc-c6b6-4083-81f0-b2c8e2c9ebb1 req-853e76f2-1542-44ef-8592-be5b83dab0cd service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Updating instance_info_cache with network_info: [{"id": "1053461c-995c-4bdc-a58c-52c4b5d4d8a5", "address": "fa:16:3e:ae:0d:68", "network": {"id": "1200e62b-3b3a-40e1-98a2-c33a17573a38", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-2039134840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8f40d19e7c74ade886c322a78583545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap1053461c-99", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1633.878913] env[61978]: DEBUG nova.objects.instance [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lazy-loading 'numa_topology' on Instance uuid a48d5ef1-b66b-429e-bbff-2351ad5eda32 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1634.223870] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396284, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.289710] env[61978]: DEBUG oslo_concurrency.lockutils [req-7ed588dc-c6b6-4083-81f0-b2c8e2c9ebb1 req-853e76f2-1542-44ef-8592-be5b83dab0cd service nova] Releasing lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1634.382036] env[61978]: DEBUG nova.objects.base [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=61978) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1634.419017] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904eae77-9e2c-4347-83f3-b190ff398717 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.427103] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ca66596-f034-4d9f-8b34-40fb382efc3a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.458705] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a58b1c5-f262-4e00-93c7-ee6ed85b3e34 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.469114] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09303d71-29a1-4396-944f-61677855273f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.484140] env[61978]: DEBUG nova.compute.provider_tree [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1634.723977] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396284, 'name': ReconfigVM_Task, 'duration_secs': 1.263907} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.724261] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Reconfigured VM instance instance-0000007e to attach disk [datastore2] a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e/a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e.vmdk or device None with type sparse {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1634.724896] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4f37598a-f952-4c6f-9611-8def52623e47 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.730857] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1634.730857] env[61978]: value = "task-1396285" [ 1634.730857] env[61978]: _type = "Task" [ 1634.730857] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.737875] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396285, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.965367] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1634.986926] env[61978]: DEBUG nova.scheduler.client.report [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1635.240340] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396285, 'name': Rename_Task, 'duration_secs': 0.124353} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.240683] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1635.240843] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb5fb6c2-47b6-49ca-b13f-ff73a624dd5b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.246381] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1635.246381] env[61978]: value = "task-1396286" [ 1635.246381] env[61978]: _type = "Task" [ 1635.246381] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.254831] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396286, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.491682] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.116s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1635.755721] env[61978]: DEBUG oslo_vmware.api [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396286, 'name': PowerOnVM_Task, 'duration_secs': 0.435062} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.755956] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1635.756220] env[61978]: INFO nova.compute.manager [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Took 9.02 seconds to spawn the instance on the hypervisor. 
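The entries above carry instance a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e through a complete VMware-backed spawn: the cached image VMDK is copied into the instance directory, the root disk is extended, the VM is reconfigured to attach the disk, renamed, and powered on, and every vCenter task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is driven by the same wait_for_task/_poll_task loop that logs progress until the task finishes. The sketch below illustrates only that poll-until-terminal pattern; it is not the oslo.vmware implementation, and FakeTask and the poll interval are hypothetical stand-ins.

# Illustrative sketch of a poll-until-complete loop like the one visible in the
# log. FakeTask is an invented stand-in for a vCenter task handle.
import time


class FakeTask:
    """Hypothetical task handle that advances its progress in 25% steps."""

    def __init__(self, name):
        self.name = name
        self.progress = 0
        self.state = "running"

    def refresh(self):
        # Pretend to query the server for the latest task state.
        self.progress = min(self.progress + 25, 100)
        if self.progress == 100:
            self.state = "success"


def wait_for_task(task, interval=0.5):
    """Poll the task at a fixed interval until it reaches a terminal state."""
    start = time.monotonic()
    while True:
        task.refresh()
        print(f"Task {task.name} progress is {task.progress}%")
        if task.state == "success":
            return time.monotonic() - start
        if task.state == "error":
            raise RuntimeError(f"Task {task.name} failed")
        time.sleep(interval)


if __name__ == "__main__":
    duration = wait_for_task(FakeTask("CopyVirtualDisk_Task"))
    print(f"completed successfully in {duration:.3f}s")

The fixed-interval poll keeps the caller simple at the cost of up to one interval of extra latency per task, which is consistent with the sub-second 'duration_secs' values reported in the log.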
[ 1635.756413] env[61978]: DEBUG nova.compute.manager [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1635.757205] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe555e26-7dd0-421b-9c1f-c2d43d97eb4e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.000192] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8760a77a-cec2-4e1f-8d19-ab5f39e3d2a1 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 20.859s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.001144] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.036s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.001336] env[61978]: INFO nova.compute.manager [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Unshelving [ 1636.275434] env[61978]: INFO nova.compute.manager [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Took 13.69 seconds to build instance. [ 1636.737802] env[61978]: DEBUG nova.compute.manager [req-0570c70d-1ba7-40e9-9b9b-a55aec27af7f req-606a33a7-a0b0-4265-b8d1-7ee2d0413d61 service nova] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Received event network-changed-aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1636.738023] env[61978]: DEBUG nova.compute.manager [req-0570c70d-1ba7-40e9-9b9b-a55aec27af7f req-606a33a7-a0b0-4265-b8d1-7ee2d0413d61 service nova] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Refreshing instance network info cache due to event network-changed-aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d. 
{{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1636.738257] env[61978]: DEBUG oslo_concurrency.lockutils [req-0570c70d-1ba7-40e9-9b9b-a55aec27af7f req-606a33a7-a0b0-4265-b8d1-7ee2d0413d61 service nova] Acquiring lock "refresh_cache-a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1636.738412] env[61978]: DEBUG oslo_concurrency.lockutils [req-0570c70d-1ba7-40e9-9b9b-a55aec27af7f req-606a33a7-a0b0-4265-b8d1-7ee2d0413d61 service nova] Acquired lock "refresh_cache-a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1636.738590] env[61978]: DEBUG nova.network.neutron [req-0570c70d-1ba7-40e9-9b9b-a55aec27af7f req-606a33a7-a0b0-4265-b8d1-7ee2d0413d61 service nova] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Refreshing network info cache for port aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1636.777596] env[61978]: DEBUG oslo_concurrency.lockutils [None req-9bd69e98-b1aa-42c1-9513-b06d9b046c6c tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.202s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.029924] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1637.030303] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1637.030529] env[61978]: DEBUG nova.objects.instance [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lazy-loading 'pci_requests' on Instance uuid a48d5ef1-b66b-429e-bbff-2351ad5eda32 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1637.443664] env[61978]: DEBUG nova.network.neutron [req-0570c70d-1ba7-40e9-9b9b-a55aec27af7f req-606a33a7-a0b0-4265-b8d1-7ee2d0413d61 service nova] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Updated VIF entry in instance network info cache for port aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1637.444077] env[61978]: DEBUG nova.network.neutron [req-0570c70d-1ba7-40e9-9b9b-a55aec27af7f req-606a33a7-a0b0-4265-b8d1-7ee2d0413d61 service nova] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Updating instance_info_cache with network_info: [{"id": "aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d", "address": "fa:16:3e:88:c7:c5", "network": {"id": "b65f6c14-2588-4d96-a954-d45b7ede9d35", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-701006356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b289cdad1fe4ad38c5d987680be2367", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "67921bdb-a7a0-46b5-ba05-ca997496e222", "external-id": "nsx-vlan-transportzone-856", "segmentation_id": 856, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa9e6b39-2f", "ovs_interfaceid": "aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1637.534448] env[61978]: DEBUG nova.objects.instance [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lazy-loading 'numa_topology' on Instance uuid a48d5ef1-b66b-429e-bbff-2351ad5eda32 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1637.946836] env[61978]: DEBUG oslo_concurrency.lockutils [req-0570c70d-1ba7-40e9-9b9b-a55aec27af7f req-606a33a7-a0b0-4265-b8d1-7ee2d0413d61 service nova] Releasing lock "refresh_cache-a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1638.036661] env[61978]: INFO nova.compute.claims [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1639.085586] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df3f94a-1a7f-4928-be6b-055dc5854b03 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.092966] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9957d86d-511b-4192-9d48-cf1821f8c1bc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.121391] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa56cc5-b808-4c8e-ac98-8b4bdeac4db4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.128099] 
env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d58e972-5d46-4cba-8a04-ce8fba9f1176 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.140703] env[61978]: DEBUG nova.compute.provider_tree [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1639.643464] env[61978]: DEBUG nova.scheduler.client.report [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1640.148327] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.118s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.177413] env[61978]: INFO nova.network.neutron [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Updating port 1053461c-995c-4bdc-a58c-52c4b5d4d8a5 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1641.560872] env[61978]: DEBUG nova.compute.manager [req-e3dc3daa-ef7c-468c-ac69-ab3fd8dcc01c req-a8a0ee10-b225-4e51-93c1-5316da83702d service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Received event network-vif-plugged-1053461c-995c-4bdc-a58c-52c4b5d4d8a5 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1641.561158] env[61978]: DEBUG oslo_concurrency.lockutils [req-e3dc3daa-ef7c-468c-ac69-ab3fd8dcc01c req-a8a0ee10-b225-4e51-93c1-5316da83702d service nova] Acquiring lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1641.561334] env[61978]: DEBUG oslo_concurrency.lockutils [req-e3dc3daa-ef7c-468c-ac69-ab3fd8dcc01c req-a8a0ee10-b225-4e51-93c1-5316da83702d service nova] Lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1641.561408] env[61978]: DEBUG oslo_concurrency.lockutils [req-e3dc3daa-ef7c-468c-ac69-ab3fd8dcc01c req-a8a0ee10-b225-4e51-93c1-5316da83702d service nova] Lock 
"a48d5ef1-b66b-429e-bbff-2351ad5eda32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.561615] env[61978]: DEBUG nova.compute.manager [req-e3dc3daa-ef7c-468c-ac69-ab3fd8dcc01c req-a8a0ee10-b225-4e51-93c1-5316da83702d service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] No waiting events found dispatching network-vif-plugged-1053461c-995c-4bdc-a58c-52c4b5d4d8a5 {{(pid=61978) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1641.561771] env[61978]: WARNING nova.compute.manager [req-e3dc3daa-ef7c-468c-ac69-ab3fd8dcc01c req-a8a0ee10-b225-4e51-93c1-5316da83702d service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Received unexpected event network-vif-plugged-1053461c-995c-4bdc-a58c-52c4b5d4d8a5 for instance with vm_state shelved_offloaded and task_state spawning. [ 1641.653254] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.653521] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquired lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.654882] env[61978]: DEBUG nova.network.neutron [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Building network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1642.357430] env[61978]: DEBUG nova.network.neutron [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Updating instance_info_cache with network_info: [{"id": "1053461c-995c-4bdc-a58c-52c4b5d4d8a5", "address": "fa:16:3e:ae:0d:68", "network": {"id": "1200e62b-3b3a-40e1-98a2-c33a17573a38", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2039134840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8f40d19e7c74ade886c322a78583545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1053461c-99", "ovs_interfaceid": "1053461c-995c-4bdc-a58c-52c4b5d4d8a5", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1642.859819] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Releasing lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1642.887791] env[61978]: DEBUG nova.virt.hardware [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-04T14:56:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='1af2051d9481575a5a830d82c0486495',container_format='bare',created_at=2024-11-04T15:16:17Z,direct_url=,disk_format='vmdk',id=0694b3db-45b7-4f54-933b-d14fc9f9453d,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-728924508-shelved',owner='a8f40d19e7c74ade886c322a78583545',properties=ImageMetaProps,protected=,size=31669760,status='active',tags=,updated_at=2024-11-04T15:16:30Z,virtual_size=,visibility=), allow threads: False {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1642.888047] env[61978]: DEBUG nova.virt.hardware [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Flavor limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1642.888226] env[61978]: DEBUG nova.virt.hardware [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Image limits 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1642.888422] env[61978]: DEBUG nova.virt.hardware [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Flavor pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1642.888577] env[61978]: DEBUG nova.virt.hardware [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Image pref 0:0:0 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1642.888730] env[61978]: DEBUG nova.virt.hardware [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61978) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1642.888950] env[61978]: DEBUG nova.virt.hardware [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 
tempest-AttachVolumeShelveTestJSON-900534277-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1642.889129] env[61978]: DEBUG nova.virt.hardware [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1642.889333] env[61978]: DEBUG nova.virt.hardware [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Got 1 possible topologies {{(pid=61978) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1642.889511] env[61978]: DEBUG nova.virt.hardware [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1642.889693] env[61978]: DEBUG nova.virt.hardware [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61978) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1642.890894] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71753c63-c900-4045-bbbb-4727cf99833a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.899007] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6080eee5-5736-4a2d-a315-5a5459bef76f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.911865] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:0d:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1053461c-995c-4bdc-a58c-52c4b5d4d8a5', 'vif_model': 'vmxnet3'}] {{(pid=61978) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1642.918930] env[61978]: DEBUG oslo.service.loopingcall [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1642.919178] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Creating VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1642.919407] env[61978]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e61d8242-ce17-44fd-a1b9-e530893832b0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.938698] env[61978]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1642.938698] env[61978]: value = "task-1396287" [ 1642.938698] env[61978]: _type = "Task" [ 1642.938698] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.950492] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396287, 'name': CreateVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.449150] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396287, 'name': CreateVM_Task} progress is 25%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.586286] env[61978]: DEBUG nova.compute.manager [req-e02f3099-fb14-470c-a085-ad0c77d2f37c req-382244c2-efa4-4cc2-b962-b11e318f403a service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Received event network-changed-1053461c-995c-4bdc-a58c-52c4b5d4d8a5 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1643.586507] env[61978]: DEBUG nova.compute.manager [req-e02f3099-fb14-470c-a085-ad0c77d2f37c req-382244c2-efa4-4cc2-b962-b11e318f403a service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Refreshing instance network info cache due to event network-changed-1053461c-995c-4bdc-a58c-52c4b5d4d8a5. {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1643.586728] env[61978]: DEBUG oslo_concurrency.lockutils [req-e02f3099-fb14-470c-a085-ad0c77d2f37c req-382244c2-efa4-4cc2-b962-b11e318f403a service nova] Acquiring lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1643.586875] env[61978]: DEBUG oslo_concurrency.lockutils [req-e02f3099-fb14-470c-a085-ad0c77d2f37c req-382244c2-efa4-4cc2-b962-b11e318f403a service nova] Acquired lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1643.587052] env[61978]: DEBUG nova.network.neutron [req-e02f3099-fb14-470c-a085-ad0c77d2f37c req-382244c2-efa4-4cc2-b962-b11e318f403a service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Refreshing network info cache for port 1053461c-995c-4bdc-a58c-52c4b5d4d8a5 {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1643.949208] env[61978]: DEBUG oslo_vmware.api [-] Task: {'id': task-1396287, 'name': CreateVM_Task, 'duration_secs': 0.643647} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.949471] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Created VM on the ESX host {{(pid=61978) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1643.950107] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0694b3db-45b7-4f54-933b-d14fc9f9453d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1643.950308] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0694b3db-45b7-4f54-933b-d14fc9f9453d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1643.950693] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0694b3db-45b7-4f54-933b-d14fc9f9453d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1643.950942] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12d39aa1-70b2-4937-a1f1-95e6bd7ca39b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.955038] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1643.955038] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]528c06e3-1f5d-2c54-dfb2-dc68be36ece4" [ 1643.955038] env[61978]: _type = "Task" [ 1643.955038] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.962119] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]528c06e3-1f5d-2c54-dfb2-dc68be36ece4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.282131] env[61978]: DEBUG nova.network.neutron [req-e02f3099-fb14-470c-a085-ad0c77d2f37c req-382244c2-efa4-4cc2-b962-b11e318f403a service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Updated VIF entry in instance network info cache for port 1053461c-995c-4bdc-a58c-52c4b5d4d8a5. 
{{(pid=61978) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1644.282511] env[61978]: DEBUG nova.network.neutron [req-e02f3099-fb14-470c-a085-ad0c77d2f37c req-382244c2-efa4-4cc2-b962-b11e318f403a service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Updating instance_info_cache with network_info: [{"id": "1053461c-995c-4bdc-a58c-52c4b5d4d8a5", "address": "fa:16:3e:ae:0d:68", "network": {"id": "1200e62b-3b3a-40e1-98a2-c33a17573a38", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2039134840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8f40d19e7c74ade886c322a78583545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1053461c-99", "ovs_interfaceid": "1053461c-995c-4bdc-a58c-52c4b5d4d8a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1644.464878] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0694b3db-45b7-4f54-933b-d14fc9f9453d" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1644.465143] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Processing image 0694b3db-45b7-4f54-933b-d14fc9f9453d {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1644.465385] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0694b3db-45b7-4f54-933b-d14fc9f9453d/0694b3db-45b7-4f54-933b-d14fc9f9453d.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1644.465541] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0694b3db-45b7-4f54-933b-d14fc9f9453d/0694b3db-45b7-4f54-933b-d14fc9f9453d.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1644.465737] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 
tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1644.466027] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-336cab16-3dd8-4efb-8f77-995845291a63 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.473771] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1644.473951] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61978) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1644.474653] env[61978]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e8a9ca0-f292-40d8-a16d-dc567edc0549 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.479583] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1644.479583] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]52908d9f-61ea-bba4-d4b0-a8d319e32b07" [ 1644.479583] env[61978]: _type = "Task" [ 1644.479583] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.488381] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': session[52499f00-04f1-7b6f-00fd-7545db7737b2]52908d9f-61ea-bba4-d4b0-a8d319e32b07, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.785430] env[61978]: DEBUG oslo_concurrency.lockutils [req-e02f3099-fb14-470c-a085-ad0c77d2f37c req-382244c2-efa4-4cc2-b962-b11e318f403a service nova] Releasing lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1644.990898] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Preparing fetch location {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1644.991270] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Fetch image to [datastore2] OSTACK_IMG_46fb9afe-be89-4cd0-9682-ecf61d975108/OSTACK_IMG_46fb9afe-be89-4cd0-9682-ecf61d975108.vmdk {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1644.991375] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Downloading stream optimized image 0694b3db-45b7-4f54-933b-d14fc9f9453d to [datastore2] OSTACK_IMG_46fb9afe-be89-4cd0-9682-ecf61d975108/OSTACK_IMG_46fb9afe-be89-4cd0-9682-ecf61d975108.vmdk on the data store datastore2 as vApp {{(pid=61978) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1644.991526] env[61978]: DEBUG nova.virt.vmwareapi.images [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Downloading image file data 0694b3db-45b7-4f54-933b-d14fc9f9453d to the ESX as VM named 'OSTACK_IMG_46fb9afe-be89-4cd0-9682-ecf61d975108' {{(pid=61978) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1645.056201] env[61978]: DEBUG oslo_vmware.rw_handles [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1645.056201] env[61978]: value = "resgroup-9" [ 1645.056201] env[61978]: _type = "ResourcePool" [ 1645.056201] env[61978]: }. 
{{(pid=61978) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1645.056511] env[61978]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-0f75b3fe-4303-4d9e-ad0d-c0a5a3175cd5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.075972] env[61978]: DEBUG oslo_vmware.rw_handles [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lease: (returnval){ [ 1645.075972] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]523b4311-c1ae-cc63-fb8f-9ec65fc05753" [ 1645.075972] env[61978]: _type = "HttpNfcLease" [ 1645.075972] env[61978]: } obtained for vApp import into resource pool (val){ [ 1645.075972] env[61978]: value = "resgroup-9" [ 1645.075972] env[61978]: _type = "ResourcePool" [ 1645.075972] env[61978]: }. {{(pid=61978) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1645.076358] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the lease: (returnval){ [ 1645.076358] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]523b4311-c1ae-cc63-fb8f-9ec65fc05753" [ 1645.076358] env[61978]: _type = "HttpNfcLease" [ 1645.076358] env[61978]: } to be ready. {{(pid=61978) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1645.082131] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1645.082131] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]523b4311-c1ae-cc63-fb8f-9ec65fc05753" [ 1645.082131] env[61978]: _type = "HttpNfcLease" [ 1645.082131] env[61978]: } is initializing. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1645.584419] env[61978]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1645.584419] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]523b4311-c1ae-cc63-fb8f-9ec65fc05753" [ 1645.584419] env[61978]: _type = "HttpNfcLease" [ 1645.584419] env[61978]: } is ready. {{(pid=61978) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1645.584903] env[61978]: DEBUG oslo_vmware.rw_handles [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1645.584903] env[61978]: value = "session[52499f00-04f1-7b6f-00fd-7545db7737b2]523b4311-c1ae-cc63-fb8f-9ec65fc05753" [ 1645.584903] env[61978]: _type = "HttpNfcLease" [ 1645.584903] env[61978]: }. 
{{(pid=61978) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1645.585604] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25405c36-2f4b-4c62-8e75-97e804e4b8ba {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.592350] env[61978]: DEBUG oslo_vmware.rw_handles [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526bffb2-8010-29dd-5f4c-1f66542cf51d/disk-0.vmdk from lease info. {{(pid=61978) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1645.592525] env[61978]: DEBUG oslo_vmware.rw_handles [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Creating HTTP connection to write to file with size = 31669760 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526bffb2-8010-29dd-5f4c-1f66542cf51d/disk-0.vmdk. {{(pid=61978) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1645.655602] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4b50a9e7-15e5-473a-827d-d47a2c0bbd72 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.737357] env[61978]: DEBUG oslo_vmware.rw_handles [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Completed reading data from the image iterator. {{(pid=61978) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1646.737749] env[61978]: DEBUG oslo_vmware.rw_handles [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526bffb2-8010-29dd-5f4c-1f66542cf51d/disk-0.vmdk. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1646.738545] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5bb7b63-bff4-4efc-a98f-9acd877c9a85 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.745426] env[61978]: DEBUG oslo_vmware.rw_handles [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526bffb2-8010-29dd-5f4c-1f66542cf51d/disk-0.vmdk is in state: ready. {{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1646.745597] env[61978]: DEBUG oslo_vmware.rw_handles [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526bffb2-8010-29dd-5f4c-1f66542cf51d/disk-0.vmdk. 
{{(pid=61978) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1646.745822] env[61978]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-bca1993b-24e2-44b7-a5eb-3266c00df301 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.971029] env[61978]: DEBUG oslo_vmware.rw_handles [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526bffb2-8010-29dd-5f4c-1f66542cf51d/disk-0.vmdk. {{(pid=61978) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1646.971196] env[61978]: INFO nova.virt.vmwareapi.images [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Downloaded image file data 0694b3db-45b7-4f54-933b-d14fc9f9453d [ 1646.972075] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702484b2-e8fb-4628-8faf-e060abd1fc5d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.986708] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-35dafcef-0ceb-4b74-ba41-30b9e1388ab8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.018031] env[61978]: INFO nova.virt.vmwareapi.images [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] The imported VM was unregistered [ 1647.020452] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Caching image {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1647.020695] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Creating directory with path [datastore2] devstack-image-cache_base/0694b3db-45b7-4f54-933b-d14fc9f9453d {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1647.020970] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2ba1268-d85a-4d9b-bc4e-9744bfa9dbd0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.063202] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Created directory with path [datastore2] devstack-image-cache_base/0694b3db-45b7-4f54-933b-d14fc9f9453d {{(pid=61978) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1647.063403] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 
tempest-AttachVolumeShelveTestJSON-900534277-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_46fb9afe-be89-4cd0-9682-ecf61d975108/OSTACK_IMG_46fb9afe-be89-4cd0-9682-ecf61d975108.vmdk to [datastore2] devstack-image-cache_base/0694b3db-45b7-4f54-933b-d14fc9f9453d/0694b3db-45b7-4f54-933b-d14fc9f9453d.vmdk. {{(pid=61978) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1647.063674] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-367494c5-4b5a-4e92-9538-26d32b10a1e3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.070069] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1647.070069] env[61978]: value = "task-1396290" [ 1647.070069] env[61978]: _type = "Task" [ 1647.070069] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.077439] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396290, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.582370] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396290, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.082331] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396290, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.584409] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396290, 'name': MoveVirtualDisk_Task} progress is 71%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.084071] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396290, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.582939] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396290, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.14289} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.583239] env[61978]: INFO nova.virt.vmwareapi.ds_util [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_46fb9afe-be89-4cd0-9682-ecf61d975108/OSTACK_IMG_46fb9afe-be89-4cd0-9682-ecf61d975108.vmdk to [datastore2] devstack-image-cache_base/0694b3db-45b7-4f54-933b-d14fc9f9453d/0694b3db-45b7-4f54-933b-d14fc9f9453d.vmdk. [ 1649.583437] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Cleaning up location [datastore2] OSTACK_IMG_46fb9afe-be89-4cd0-9682-ecf61d975108 {{(pid=61978) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1649.583607] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_46fb9afe-be89-4cd0-9682-ecf61d975108 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1649.583865] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00cf2973-8b8b-4a9d-8a0e-c06da7565339 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.590327] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1649.590327] env[61978]: value = "task-1396291" [ 1649.590327] env[61978]: _type = "Task" [ 1649.590327] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.597600] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396291, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.100576] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396291, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033054} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.100972] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1650.101046] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0694b3db-45b7-4f54-933b-d14fc9f9453d/0694b3db-45b7-4f54-933b-d14fc9f9453d.vmdk" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1650.101272] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/0694b3db-45b7-4f54-933b-d14fc9f9453d/0694b3db-45b7-4f54-933b-d14fc9f9453d.vmdk to [datastore2] a48d5ef1-b66b-429e-bbff-2351ad5eda32/a48d5ef1-b66b-429e-bbff-2351ad5eda32.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1650.101527] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2288a147-58ad-4128-82be-d3319f436a81 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.107541] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1650.107541] env[61978]: value = "task-1396292" [ 1650.107541] env[61978]: _type = "Task" [ 1650.107541] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.115610] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396292, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.619825] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396292, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.120810] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396292, 'name': CopyVirtualDisk_Task} progress is 32%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.621628] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396292, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.122345] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396292, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.621406] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396292, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.452283} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.621692] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/0694b3db-45b7-4f54-933b-d14fc9f9453d/0694b3db-45b7-4f54-933b-d14fc9f9453d.vmdk to [datastore2] a48d5ef1-b66b-429e-bbff-2351ad5eda32/a48d5ef1-b66b-429e-bbff-2351ad5eda32.vmdk {{(pid=61978) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1652.622494] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32aa777-b529-424c-98bd-2ed81d7d0c07 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.643574] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Reconfiguring VM instance instance-0000007b to attach disk [datastore2] a48d5ef1-b66b-429e-bbff-2351ad5eda32/a48d5ef1-b66b-429e-bbff-2351ad5eda32.vmdk or device None with type streamOptimized {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1652.643847] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10abf2db-54eb-4dd4-bd3b-f0699d7517fa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.662426] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1652.662426] env[61978]: value = "task-1396293" [ 1652.662426] env[61978]: _type = "Task" [ 1652.662426] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.669694] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396293, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.173076] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396293, 'name': ReconfigVM_Task, 'duration_secs': 0.300569} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.173464] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Reconfigured VM instance instance-0000007b to attach disk [datastore2] a48d5ef1-b66b-429e-bbff-2351ad5eda32/a48d5ef1-b66b-429e-bbff-2351ad5eda32.vmdk or device None with type streamOptimized {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1653.174104] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5c4b5898-a0f9-416d-9a47-1d12e87fc518 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.180305] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1653.180305] env[61978]: value = "task-1396294" [ 1653.180305] env[61978]: _type = "Task" [ 1653.180305] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.188170] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396294, 'name': Rename_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.694725] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396294, 'name': Rename_Task, 'duration_secs': 0.145693} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.695188] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Powering on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1653.695562] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f8afd92-b31a-4fb4-8a06-0384dc4248d7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.703316] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1653.703316] env[61978]: value = "task-1396295" [ 1653.703316] env[61978]: _type = "Task" [ 1653.703316] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.714400] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396295, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.212891] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396295, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.713762] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396295, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.214443] env[61978]: DEBUG oslo_vmware.api [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396295, 'name': PowerOnVM_Task, 'duration_secs': 1.026893} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.214836] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Powered on the VM {{(pid=61978) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1655.313068] env[61978]: DEBUG nova.compute.manager [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Checking state {{(pid=61978) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1655.314007] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-680d5d1b-ff54-4f83-83ef-aac84dfd78ee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.556704] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1655.556865] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Cleaning up deleted instances with incomplete migration {{(pid=61978) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 1655.832563] env[61978]: DEBUG oslo_concurrency.lockutils [None req-b9cd02b2-7353-495c-8de8-b30e443d3a92 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.831s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1662.054865] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1663.556401] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1667.557023] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1667.557456] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1668.556185] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1668.556429] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1668.556585] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1668.556729] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1668.556874] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1669.060613] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.061017] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.061017] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.061185] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1669.062139] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23c6dc6c-8326-47c4-aab4-98bf8e1f7fb5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.070499] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e7c1d2-f55c-488a-b01d-ffc5fb9156f9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.084163] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce0e4de-74fa-4d14-8b88-cc5e096f118b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.090037] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-693efd64-3161-42d3-bd48-ec8684c14a2f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.117223] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180860MB free_disk=185GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1669.117357] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.117576] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1670.141323] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1670.141657] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Instance a48d5ef1-b66b-429e-bbff-2351ad5eda32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61978) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1670.141657] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1670.141657] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1670.179355] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43baa8c2-1221-410b-89a7-9cbd2de4bd2c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.186869] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd891cf-7b4b-489e-8ec3-5cb7d721e9ef {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.215367] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60dea7c3-87c2-4598-845d-be1022fa5fd0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.222242] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f8035fb-8d20-4e22-9ba9-7f26854ca20f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.235301] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1670.755203] env[61978]: ERROR nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [req-58b21106-8239-42f9-9616-dc080255d78d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 44209228-3464-48ae-bc40-83eccd44b0cf. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-58b21106-8239-42f9-9616-dc080255d78d"}]} [ 1670.770807] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Refreshing inventories for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1670.782376] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Updating ProviderTree inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1670.782547] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1670.791755] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Refreshing aggregate associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, aggregates: None {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1670.807880] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Refreshing trait associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1670.839862] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35375e9-2388-4117-be75-0546f5f0761e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.847098] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e5879b-f6fe-47b6-85a5-bf164e54ecbd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.876537] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c2e1a8-d3f5-424d-9d4b-7461ac8cd5ee {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.883381] env[61978]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8fed369-5ee6-4845-8f2b-fc4c2efb90e0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.895792] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1671.424871] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Updated inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf with generation 182 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1671.425203] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Updating resource provider 44209228-3464-48ae-bc40-83eccd44b0cf generation from 182 to 183 during operation: update_inventory {{(pid=61978) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1671.425258] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 185, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1671.930563] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1671.930749] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.813s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1671.931181] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1671.931322] env[61978]: DEBUG 
nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Cleaning up deleted instances {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1672.438892] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] There are 20 instances to clean {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 1672.439316] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 03bd4306-195f-44f8-a35c-32869baff416] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1672.942786] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 6799770e-a132-4fd1-8c7c-2e0bc0bf9cfa] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1673.445932] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: d542b9ef-a4d8-4dad-8b97-b9e67372e214] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1673.949812] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: bc2d4609-bb75-48e7-859b-7cbb02041f52] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1674.453229] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: d587cf18-1558-4e01-be53-3b7bf8287fdd] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1674.858604] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6fdb7530-1ed5-45d4-8e56-0f4f34bfef33 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.858864] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6fdb7530-1ed5-45d4-8e56-0f4f34bfef33 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.955951] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: fe81b6d2-053f-4db4-b3dd-a67b21d02c52] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1675.362879] env[61978]: DEBUG nova.compute.utils [None req-6fdb7530-1ed5-45d4-8e56-0f4f34bfef33 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Using /dev/sd instead of None {{(pid=61978) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1675.458642] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 116a3384-8bf3-49c6-9ee0-01d2781c69d5] Instance has had 0 of 5 cleanup 
attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1675.865828] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6fdb7530-1ed5-45d4-8e56-0f4f34bfef33 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.961710] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 77a8cde0-b046-4970-9979-9d4b85a224e2] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1676.465212] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 733c2f53-04d3-4a8b-a7c1-5194d7961a31] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1676.927906] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6fdb7530-1ed5-45d4-8e56-0f4f34bfef33 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1676.928154] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6fdb7530-1ed5-45d4-8e56-0f4f34bfef33 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1676.928400] env[61978]: INFO nova.compute.manager [None req-6fdb7530-1ed5-45d4-8e56-0f4f34bfef33 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Attaching volume 2e635dc0-1498-4f3f-ad37-528969985e29 to /dev/sdb [ 1676.958150] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab863dd4-0266-40f4-b15f-34cc3604c952 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.965134] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3117aed-7323-4c6b-b791-d0d72e4c7711 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.967821] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 31c61275-c058-4c3e-8580-0958489d01a0] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1676.979753] env[61978]: DEBUG nova.virt.block_device [None req-6fdb7530-1ed5-45d4-8e56-0f4f34bfef33 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Updating existing volume attachment record: 402d4732-d184-4f0c-9d43-487efeb1a2cc {{(pid=61978) _volume_attach 
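Stepping back to the placement exchange logged between [ 1670.755203] and [ 1671.425258] above: the inventory PUT carries the resource provider generation, Placement rejects it with a 409 placement.concurrent_update because another writer bumped the generation, and the client refreshes the provider and retries, landing the update at generation 183. A rough sketch of that update-with-generation loop is below; the endpoint URL, microversion header and the pre-authenticated requests.Session are assumptions, not values from this deployment.

    import requests

    PLACEMENT = 'https://placement.example/placement'       # placeholder endpoint
    RP_UUID = '44209228-3464-48ae-bc40-83eccd44b0cf'
    HEADERS = {'OpenStack-API-Version': 'placement 1.26'}   # auth token omitted

    def set_inventories(session: requests.Session, inventories: dict,
                        generation: int, attempts: int = 2):
        url = f'{PLACEMENT}/resource_providers/{RP_UUID}/inventories'
        for _ in range(attempts):
            body = {'resource_provider_generation': generation,
                    'inventories': inventories}
            resp = session.put(url, json=body, headers=HEADERS)
            if resp.status_code != 409:
                return resp
            # 409 "placement.concurrent_update": someone else changed the
            # provider; re-read the current generation and try once more,
            # as the log does before succeeding with generation 182 -> 183.
            generation = session.get(url, headers=HEADERS).json()[
                'resource_provider_generation']
        return resp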
/opt/stack/nova/nova/virt/block_device.py:666}} [ 1677.470852] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: a6f73332-d0a5-4c52-8e38-8982e42ee62f] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1677.974644] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 86f776d7-1ace-4e3c-8fa9-1562b97c832c] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1678.477838] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 6a454083-8d85-4a29-98dc-29eb0a072560] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1678.981300] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 30d6cc11-0258-47aa-b083-7c103c91acf2] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1679.485437] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 1ec56ce5-c580-4369-ac0a-59c0782ac570] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1679.988808] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: ce71756d-7a11-46d4-a5dd-a5b720df83c6] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1680.492793] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: db960922-12b5-41e7-9de3-312136819bb0] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1680.996060] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: ac1676dd-affa-49cd-9e7b-a301abcec232] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1681.499706] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: fef5a0cf-ac34-4e2e-8f6b-faa4db730bdd] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1681.523195] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fdb7530-1ed5-45d4-8e56-0f4f34bfef33 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Volume attach. 
Driver type: vmdk {{(pid=61978) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1681.523458] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fdb7530-1ed5-45d4-8e56-0f4f34bfef33 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296099', 'volume_id': '2e635dc0-1498-4f3f-ad37-528969985e29', 'name': 'volume-2e635dc0-1498-4f3f-ad37-528969985e29', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e', 'attached_at': '', 'detached_at': '', 'volume_id': '2e635dc0-1498-4f3f-ad37-528969985e29', 'serial': '2e635dc0-1498-4f3f-ad37-528969985e29'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1681.524572] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6065f18-eb9d-41e8-b5e9-bbc66a8545cf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.541018] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6b0843-d212-4a8b-8419-c95dc9edc708 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.564234] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fdb7530-1ed5-45d4-8e56-0f4f34bfef33 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] volume-2e635dc0-1498-4f3f-ad37-528969985e29/volume-2e635dc0-1498-4f3f-ad37-528969985e29.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1681.564463] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f42578bf-15c4-4a99-b3f3-d376e16236eb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.581014] env[61978]: DEBUG oslo_vmware.api [None req-6fdb7530-1ed5-45d4-8e56-0f4f34bfef33 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1681.581014] env[61978]: value = "task-1396298" [ 1681.581014] env[61978]: _type = "Task" [ 1681.581014] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.588190] env[61978]: DEBUG oslo_vmware.api [None req-6fdb7530-1ed5-45d4-8e56-0f4f34bfef33 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396298, 'name': ReconfigVM_Task} progress is 5%. 
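The attach at [ 1681.564234] is a plain ReconfigVM_Task that adds a virtual disk whose backing file is the pre-existing Cinder volume VMDK ("[datastore2] volume-.../volume-....vmdk", thin provisioned). Nova drives this through oslo.vmware's suds client; the pyVmomi-flavoured sketch below only illustrates the shape of the device-change spec and assumes a live vim.VirtualMachine handle plus a controller key and free unit number discovered elsewhere.

    from pyVmomi import vim

    def attach_existing_vmdk(vm, vmdk_path, controller_key, unit_number):
        # Build a VirtualDisk pointing at the existing volume file rather
        # than creating a new one.
        disk = vim.vm.device.VirtualDisk()
        disk.controllerKey = controller_key
        disk.unitNumber = unit_number          # must be a free slot (not 7 on SCSI)
        disk.backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo()
        disk.backing.fileName = vmdk_path      # e.g. "[datastore2] volume-.../volume-....vmdk"
        disk.backing.diskMode = 'persistent'
        disk.backing.thinProvisioned = True    # "with type thin" in the log

        change = vim.vm.device.VirtualDeviceSpec()
        change.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
        change.device = disk

        # Returns a vim.Task (cf. task-1396298) that the caller polls.
        return vm.ReconfigVM_Task(spec=vim.vm.ConfigSpec(deviceChange=[change]))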
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.003278] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: 7823099f-efdf-46bf-85d7-69e105dfb02c] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1682.091019] env[61978]: DEBUG oslo_vmware.api [None req-6fdb7530-1ed5-45d4-8e56-0f4f34bfef33 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396298, 'name': ReconfigVM_Task, 'duration_secs': 0.332714} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.091337] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fdb7530-1ed5-45d4-8e56-0f4f34bfef33 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Reconfigured VM instance instance-0000007e to attach disk [datastore2] volume-2e635dc0-1498-4f3f-ad37-528969985e29/volume-2e635dc0-1498-4f3f-ad37-528969985e29.vmdk or device None with type thin {{(pid=61978) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1682.095798] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bae9f5fb-f1d2-4e78-9178-09a33af9945a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.109972] env[61978]: DEBUG oslo_vmware.api [None req-6fdb7530-1ed5-45d4-8e56-0f4f34bfef33 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1682.109972] env[61978]: value = "task-1396299" [ 1682.109972] env[61978]: _type = "Task" [ 1682.109972] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.117122] env[61978]: DEBUG oslo_vmware.api [None req-6fdb7530-1ed5-45d4-8e56-0f4f34bfef33 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396299, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.507143] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1682.620838] env[61978]: DEBUG oslo_vmware.api [None req-6fdb7530-1ed5-45d4-8e56-0f4f34bfef33 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396299, 'name': ReconfigVM_Task, 'duration_secs': 0.137838} completed successfully. 
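Both ReconfigVM_Task calls above are asynchronous: oslo.vmware's wait_for_task polls the task object and emits the progress lines ("progress is 5%." ... "completed successfully.") until it reaches a terminal state. A simplified poll loop, assuming a pyVmomi vim.Task handle, looks like this:

    import time
    from pyVmomi import vim

    def wait_for_task(task, poll_interval=0.5):
        """Block until a vCenter task finishes, returning its result or
        raising its fault."""
        while True:
            info = task.info
            if info.state == vim.TaskInfo.State.success:
                return info.result
            if info.state == vim.TaskInfo.State.error:
                raise info.error                     # a vim.MethodFault subclass
            # Equivalent of the "Task: {...} progress is N%." debug lines.
            print(f'task {info.key} progress is {info.progress or 0}%')
            time.sleep(poll_interval)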
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.621166] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fdb7530-1ed5-45d4-8e56-0f4f34bfef33 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296099', 'volume_id': '2e635dc0-1498-4f3f-ad37-528969985e29', 'name': 'volume-2e635dc0-1498-4f3f-ad37-528969985e29', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e', 'attached_at': '', 'detached_at': '', 'volume_id': '2e635dc0-1498-4f3f-ad37-528969985e29', 'serial': '2e635dc0-1498-4f3f-ad37-528969985e29'} {{(pid=61978) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1683.655951] env[61978]: DEBUG nova.objects.instance [None req-6fdb7530-1ed5-45d4-8e56-0f4f34bfef33 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lazy-loading 'flavor' on Instance uuid a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1684.162601] env[61978]: DEBUG oslo_concurrency.lockutils [None req-6fdb7530-1ed5-45d4-8e56-0f4f34bfef33 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.234s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.329758] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8123db43-ca0e-4514-bc2e-0750bd08dfd8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1684.330089] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8123db43-ca0e-4514-bc2e-0750bd08dfd8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.832960] env[61978]: INFO nova.compute.manager [None req-8123db43-ca0e-4514-bc2e-0750bd08dfd8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Detaching volume 2e635dc0-1498-4f3f-ad37-528969985e29 [ 1684.863199] env[61978]: INFO nova.virt.block_device [None req-8123db43-ca0e-4514-bc2e-0750bd08dfd8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Attempting to driver detach volume 2e635dc0-1498-4f3f-ad37-528969985e29 from mountpoint /dev/sdb [ 1684.863199] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-8123db43-ca0e-4514-bc2e-0750bd08dfd8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] 
[instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Volume detach. Driver type: vmdk {{(pid=61978) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1684.863199] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-8123db43-ca0e-4514-bc2e-0750bd08dfd8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296099', 'volume_id': '2e635dc0-1498-4f3f-ad37-528969985e29', 'name': 'volume-2e635dc0-1498-4f3f-ad37-528969985e29', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e', 'attached_at': '', 'detached_at': '', 'volume_id': '2e635dc0-1498-4f3f-ad37-528969985e29', 'serial': '2e635dc0-1498-4f3f-ad37-528969985e29'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1684.863822] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b214d427-7d8e-4da6-ba52-f30b9f195854 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.884159] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f6de76-a001-4324-b12f-727abb4f5c09 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.890511] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37567333-0f9c-41a4-9856-f3578d7d7bb8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.909403] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e2c2897-9814-4f7f-b4b1-f0331b8dbf8f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.924008] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-8123db43-ca0e-4514-bc2e-0750bd08dfd8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] The volume has not been displaced from its original location: [datastore2] volume-2e635dc0-1498-4f3f-ad37-528969985e29/volume-2e635dc0-1498-4f3f-ad37-528969985e29.vmdk. No consolidation needed. 
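Before detaching, the driver checks whether the disk backing still lives at the volume's original datastore path; only if a migration displaced it would the file have to be consolidated back before the volume is returned to Cinder (here it has not moved, hence "No consolidation needed."). A toy version of that check, with a deliberately simple path comparison, might look like:

    def needs_consolidation(original_path: str, current_path: str) -> bool:
        """Return True when the attached disk's backing file no longer sits
        at the volume's original '[datastore] folder/file.vmdk' location."""
        def normalize(path: str) -> str:
            # Collapse spacing after the datastore bracket and ignore case;
            # the real driver compares datastore references and file paths
            # separately.
            return path.replace('] ', ']').strip().lower()
        return normalize(original_path) != normalize(current_path)

    ORIGINAL = ('[datastore2] volume-2e635dc0-1498-4f3f-ad37-528969985e29/'
                'volume-2e635dc0-1498-4f3f-ad37-528969985e29.vmdk')
    print(needs_consolidation(ORIGINAL, ORIGINAL))   # False -> no consolidation needed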
{{(pid=61978) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1684.929140] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-8123db43-ca0e-4514-bc2e-0750bd08dfd8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Reconfiguring VM instance instance-0000007e to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1684.929421] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2be3e8a-51f5-49ff-857a-b84bfe4ebcfc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.946132] env[61978]: DEBUG oslo_vmware.api [None req-8123db43-ca0e-4514-bc2e-0750bd08dfd8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1684.946132] env[61978]: value = "task-1396300" [ 1684.946132] env[61978]: _type = "Task" [ 1684.946132] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.953518] env[61978]: DEBUG oslo_vmware.api [None req-8123db43-ca0e-4514-bc2e-0750bd08dfd8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396300, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.009302] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1685.456171] env[61978]: DEBUG oslo_vmware.api [None req-8123db43-ca0e-4514-bc2e-0750bd08dfd8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396300, 'name': ReconfigVM_Task, 'duration_secs': 0.209216} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.456461] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-8123db43-ca0e-4514-bc2e-0750bd08dfd8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Reconfigured VM instance instance-0000007e to detach disk 2001 {{(pid=61978) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1685.461126] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b35a141e-7c28-4336-9f00-bddead3870dc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.475928] env[61978]: DEBUG oslo_vmware.api [None req-8123db43-ca0e-4514-bc2e-0750bd08dfd8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1685.475928] env[61978]: value = "task-1396301" [ 1685.475928] env[61978]: _type = "Task" [ 1685.475928] env[61978]: } to complete. 
{{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.483914] env[61978]: DEBUG oslo_vmware.api [None req-8123db43-ca0e-4514-bc2e-0750bd08dfd8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396301, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.514480] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1685.514624] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1685.514745] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1685.986322] env[61978]: DEBUG oslo_vmware.api [None req-8123db43-ca0e-4514-bc2e-0750bd08dfd8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396301, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.045674] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1686.045860] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquired lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1686.046023] env[61978]: DEBUG nova.network.neutron [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Forcefully refreshing network info cache for instance {{(pid=61978) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1686.046182] env[61978]: DEBUG nova.objects.instance [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lazy-loading 'info_cache' on Instance uuid a48d5ef1-b66b-429e-bbff-2351ad5eda32 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1686.487829] env[61978]: DEBUG oslo_vmware.api [None req-8123db43-ca0e-4514-bc2e-0750bd08dfd8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396301, 'name': ReconfigVM_Task, 'duration_secs': 0.746364} completed successfully. 
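The _heal_instance_info_cache periodic task interleaved with the detach above works through the host's instances one per run, rebuilding its to-do list when it empties and forcefully refreshing the network info cache for the instance at the head of the queue (a48d5ef1-... in this cycle). A very reduced sketch of that round-robin pattern, with callbacks standing in for the database listing and the Neutron-backed refresh:

    from collections import deque

    class InfoCacheHealer:
        def __init__(self, list_instances_cb, refresh_cb):
            self._list_instances = list_instances_cb   # stands in for the DB query
            self._refresh = refresh_cb                 # stands in for the Neutron refresh
            self._queue = deque()

        def run(self):
            if not self._queue:
                # "Rebuilding the list of instances to heal"
                self._queue.extend(self._list_instances())
            if self._queue:
                uuid = self._queue.popleft()
                # "Forcefully refreshing network info cache for instance"
                self._refresh(uuid)

    healer = InfoCacheHealer(
        lambda: ['a48d5ef1-b66b-429e-bbff-2351ad5eda32',
                 'a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e'],
        lambda uuid: print(f'refreshed info cache for {uuid}'))
    healer.run()   # heals a48d5ef1-... this run, the other instance the next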
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.488138] env[61978]: DEBUG nova.virt.vmwareapi.volumeops [None req-8123db43-ca0e-4514-bc2e-0750bd08dfd8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-296099', 'volume_id': '2e635dc0-1498-4f3f-ad37-528969985e29', 'name': 'volume-2e635dc0-1498-4f3f-ad37-528969985e29', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e', 'attached_at': '', 'detached_at': '', 'volume_id': '2e635dc0-1498-4f3f-ad37-528969985e29', 'serial': '2e635dc0-1498-4f3f-ad37-528969985e29'} {{(pid=61978) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1687.027193] env[61978]: DEBUG nova.objects.instance [None req-8123db43-ca0e-4514-bc2e-0750bd08dfd8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lazy-loading 'flavor' on Instance uuid a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1687.771544] env[61978]: DEBUG nova.network.neutron [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Updating instance_info_cache with network_info: [{"id": "1053461c-995c-4bdc-a58c-52c4b5d4d8a5", "address": "fa:16:3e:ae:0d:68", "network": {"id": "1200e62b-3b3a-40e1-98a2-c33a17573a38", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-2039134840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8f40d19e7c74ade886c322a78583545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1053461c-99", "ovs_interfaceid": "1053461c-995c-4bdc-a58c-52c4b5d4d8a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1688.034570] env[61978]: DEBUG oslo_concurrency.lockutils [None req-8123db43-ca0e-4514-bc2e-0750bd08dfd8 tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.704s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1688.274052] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Releasing lock "refresh_cache-a48d5ef1-b66b-429e-bbff-2351ad5eda32" {{(pid=61978) lock 
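The refreshed cache entry above carries the full VIF layout for a48d5ef1-...: one OVS port (tap1053461c-99) with fixed IP 192.168.128.8 and floating IP 10.180.180.248. A small helper that walks exactly that structure and summarizes it per VIF (key names are taken from the logged cache entry):

    def summarize_network_info(network_info):
        """Flatten a cached network_info list into one record per fixed IP."""
        summary = []
        for vif in network_info:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    summary.append({
                        'vif_id': vif['id'],
                        'mac': vif['address'],
                        'fixed_ip': ip['address'],
                        'floating_ips': [f['address']
                                         for f in ip.get('floating_ips', [])],
                        'devname': vif.get('devname'),
                    })
        return summary

    # For the entry logged above this yields one record:
    # fixed 192.168.128.8, floating 10.180.180.248, devname tap1053461c-99.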
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1688.274246] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Updated the network info_cache for instance {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1688.274462] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._sync_power_states {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1688.572662] env[61978]: DEBUG oslo_concurrency.lockutils [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.572869] env[61978]: DEBUG oslo_concurrency.lockutils [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1688.573120] env[61978]: DEBUG oslo_concurrency.lockutils [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.573320] env[61978]: DEBUG oslo_concurrency.lockutils [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1688.573496] env[61978]: DEBUG oslo_concurrency.lockutils [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1688.575598] env[61978]: INFO nova.compute.manager [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Terminating instance [ 1688.577338] env[61978]: DEBUG nova.compute.manager [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Start destroying 
the instance on the hypervisor. {{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1688.577568] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1688.578410] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8102e565-8b3c-480e-9901-c6ff81bca651 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.585624] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1688.585844] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0e3e2249-8f51-45f8-a527-9b6bedf33f4a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.591953] env[61978]: DEBUG oslo_vmware.api [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1688.591953] env[61978]: value = "task-1396302" [ 1688.591953] env[61978]: _type = "Task" [ 1688.591953] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.599241] env[61978]: DEBUG oslo_vmware.api [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396302, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.777878] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Getting list of instances from cluster (obj){ [ 1688.777878] env[61978]: value = "domain-c8" [ 1688.777878] env[61978]: _type = "ClusterComputeResource" [ 1688.777878] env[61978]: } {{(pid=61978) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1688.778946] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e419b9-7efd-47c1-87fe-f7dcec7c704c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.790269] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Got total of 2 instances {{(pid=61978) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1688.790436] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Triggering sync for uuid a48d5ef1-b66b-429e-bbff-2351ad5eda32 {{(pid=61978) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1688.790634] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Triggering sync for uuid a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e {{(pid=61978) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1688.790957] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.791191] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1688.791453] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.792272] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-995aea77-c8fb-4780-a8b4-1181676c1fa4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.101449] env[61978]: DEBUG oslo_vmware.api [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396302, 'name': PowerOffVM_Task, 'duration_secs': 0.189861} completed successfully. 
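The _sync_power_states pass asks the driver which VMs the cluster actually has ("Got total of 2 instances") and then triggers a per-UUID sync, taking each instance lock so it does not race with the termination already in flight for a4a9fe67-... A bare-bones sketch of that reconciliation loop, with a callback in place of the real query-and-sync:

    def sync_power_states(driver_uuids, db_uuids, sync_one):
        """For every instance the database knows about, report whether the
        hypervisor still has it and let sync_one reconcile the power state."""
        present = set(driver_uuids)
        for uuid in db_uuids:
            sync_one(uuid, uuid in present)

    INSTANCES = ['a48d5ef1-b66b-429e-bbff-2351ad5eda32',
                 'a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e']
    sync_power_states(
        INSTANCES, INSTANCES,
        lambda uuid, on_host: print(f'sync {uuid}: on hypervisor={on_host}'))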
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.101804] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1689.101863] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1689.102116] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b572d0a9-2b1d-4c36-baac-57d91059a17a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.161179] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1689.161423] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1689.161587] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Deleting the datastore file [datastore2] a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1689.161880] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9244ef27-c648-4c88-92f8-f0357c4218e6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.168806] env[61978]: DEBUG oslo_vmware.api [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for the task: (returnval){ [ 1689.168806] env[61978]: value = "task-1396304" [ 1689.168806] env[61978]: _type = "Task" [ 1689.168806] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.176749] env[61978]: DEBUG oslo_vmware.api [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396304, 'name': DeleteDatastoreFile_Task} progress is 0%. 
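The teardown of a4a9fe67-... follows the usual VMware driver order: power the VM off, unregister it from the inventory (a synchronous call, no task), then delete its directory on datastore2 via FileManager.DeleteDatastoreFile_Task. A compressed pyVmomi-style sketch of that sequence, assuming live handles for the VM, the service content and the datacenter:

    import time
    from pyVmomi import vim

    def _wait(task):
        # Minimal poll (see the fuller wait_for_task sketch earlier).
        while task.info.state not in (vim.TaskInfo.State.success,
                                      vim.TaskInfo.State.error):
            time.sleep(0.5)
        if task.info.state == vim.TaskInfo.State.error:
            raise task.info.error
        return task.info.result

    def destroy_instance(content, vm, datastore_dir, datacenter):
        _wait(vm.PowerOffVM_Task())                  # "Powering off the VM"
        vm.UnregisterVM()                            # "Unregistering the VM"
        _wait(content.fileManager.DeleteDatastoreFile_Task(
            name=datastore_dir,                      # e.g. "[datastore2] a4a9fe67-..."
            datacenter=datacenter))                  # "Deleting the datastore file"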
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.300658] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.509s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1689.679025] env[61978]: DEBUG oslo_vmware.api [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Task: {'id': task-1396304, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128355} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.679318] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1689.679520] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1689.679736] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1689.679927] env[61978]: INFO nova.compute.manager [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1689.680198] env[61978]: DEBUG oslo.service.loopingcall [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1689.680403] env[61978]: DEBUG nova.compute.manager [-] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1689.680528] env[61978]: DEBUG nova.network.neutron [-] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1690.135556] env[61978]: DEBUG nova.compute.manager [req-3b9ad743-5514-48ba-9a3d-a8f0eaae273b req-fb8b1cb7-d12e-48bb-9fa8-6b81e1bc7003 service nova] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Received event network-vif-deleted-aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1690.135809] env[61978]: INFO nova.compute.manager [req-3b9ad743-5514-48ba-9a3d-a8f0eaae273b req-fb8b1cb7-d12e-48bb-9fa8-6b81e1bc7003 service nova] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Neutron deleted interface aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d; detaching it from the instance and deleting it from the info cache [ 1690.135958] env[61978]: DEBUG nova.network.neutron [req-3b9ad743-5514-48ba-9a3d-a8f0eaae273b req-fb8b1cb7-d12e-48bb-9fa8-6b81e1bc7003 service nova] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1690.609640] env[61978]: DEBUG nova.network.neutron [-] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1690.637824] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6d06206f-d0a3-4b6a-8072-80a03df70484 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.649240] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f9d5234-5b26-4855-b6a6-9c7dd7f3c084 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.673181] env[61978]: DEBUG nova.compute.manager [req-3b9ad743-5514-48ba-9a3d-a8f0eaae273b req-fb8b1cb7-d12e-48bb-9fa8-6b81e1bc7003 service nova] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Detach interface failed, port_id=aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d, reason: Instance a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e could not be found. 
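The network-vif-deleted event from Neutron races with the termination: by the time the event handler tries to detach port aa9e6b39-... from the guest, the VM is already gone, so the failure is logged and swallowed and the cached network_info simply ends up empty. A self-contained sketch of that tolerant handler; InstanceNotFound is a local stand-in for the driver's exception and detach_from_guest is a hypothetical driver call.

    class InstanceNotFound(Exception):
        """Local stand-in so the sketch is self-contained."""

    def detach_from_guest(instance_uuid, port_id):
        # Hypothetical driver call; during termination the VM has already
        # been unregistered, so the lookup fails.
        raise InstanceNotFound(instance_uuid)

    def handle_network_vif_deleted(cache, instance_uuid, port_id):
        # Drop the deleted port from the cached network_info first ...
        cache[instance_uuid] = [vif for vif in cache.get(instance_uuid, [])
                                if vif.get('id') != port_id]
        try:
            # ... then try to detach it from the guest, tolerating the case
            # where the instance no longer exists ("Detach interface failed,
            # ... could not be found" in the log).
            detach_from_guest(instance_uuid, port_id)
        except InstanceNotFound:
            pass

    cache = {'a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e':
             [{'id': 'aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d'}]}
    handle_network_vif_deleted(cache, 'a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e',
                               'aa9e6b39-2ff2-40f5-b3db-5ad9a2bd355d')
    print(cache)   # -> the empty network_info the cache update above shows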
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1690.950139] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.950427] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1690.950648] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.950867] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1690.951062] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.953215] env[61978]: INFO nova.compute.manager [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Terminating instance [ 1690.954935] env[61978]: DEBUG nova.compute.manager [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Start destroying the instance on the hypervisor. 
{{(pid=61978) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1690.955156] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Destroying instance {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1690.955956] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2014790-5973-44be-be14-f135b2541413 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.963504] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Powering off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1690.963729] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0e4847e8-3901-4c74-a6e7-9c83477ac4d8 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.968982] env[61978]: DEBUG oslo_vmware.api [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1690.968982] env[61978]: value = "task-1396305" [ 1690.968982] env[61978]: _type = "Task" [ 1690.968982] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.976836] env[61978]: DEBUG oslo_vmware.api [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396305, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.112791] env[61978]: INFO nova.compute.manager [-] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Took 1.43 seconds to deallocate network for instance. [ 1691.480157] env[61978]: DEBUG oslo_vmware.api [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396305, 'name': PowerOffVM_Task, 'duration_secs': 0.161945} completed successfully. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.480519] env[61978]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Powered off the VM {{(pid=61978) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1691.480622] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Unregistering the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1691.480883] env[61978]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50116ce9-2f4c-4285-811e-bfc478e7bb6c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.539602] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Unregistered the VM {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1691.539851] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Deleting contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1691.540070] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Deleting the datastore file [datastore2] a48d5ef1-b66b-429e-bbff-2351ad5eda32 {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1691.540345] env[61978]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b0f9e71-52c8-4ba3-a1a9-5bbea7999254 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.546978] env[61978]: DEBUG oslo_vmware.api [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for the task: (returnval){ [ 1691.546978] env[61978]: value = "task-1396307" [ 1691.546978] env[61978]: _type = "Task" [ 1691.546978] env[61978]: } to complete. {{(pid=61978) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.554170] env[61978]: DEBUG oslo_vmware.api [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396307, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.619675] env[61978]: DEBUG oslo_concurrency.lockutils [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.619675] env[61978]: DEBUG oslo_concurrency.lockutils [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.619900] env[61978]: DEBUG nova.objects.instance [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lazy-loading 'resources' on Instance uuid a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1692.057405] env[61978]: DEBUG oslo_vmware.api [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Task: {'id': task-1396307, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129096} completed successfully. {{(pid=61978) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.057734] env[61978]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Deleted the datastore file {{(pid=61978) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1692.057933] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Deleted contents of the VM from datastore datastore2 {{(pid=61978) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1692.058134] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Instance destroyed {{(pid=61978) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1692.058327] env[61978]: INFO nova.compute.manager [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1692.058580] env[61978]: DEBUG oslo.service.loopingcall [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61978) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1692.058783] env[61978]: DEBUG nova.compute.manager [-] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Deallocating network for instance {{(pid=61978) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1692.058879] env[61978]: DEBUG nova.network.neutron [-] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] deallocate_for_instance() {{(pid=61978) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1692.164451] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0010a83a-0272-478c-8c60-789207b5032d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.171946] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f6ff5e-bbb0-47fa-8d6d-f72243ecf58c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.204109] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79745a8b-3648-46a5-a473-6309e9c2a4f4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.211060] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f38f35a5-f30d-4452-a5c2-5c86a74527a2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.223679] env[61978]: DEBUG nova.compute.provider_tree [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1692.519334] env[61978]: DEBUG nova.compute.manager [req-1788afc1-ba01-464b-8bf4-04c117bef181 req-6cb4dfd2-85e7-4c01-bb94-7f64d2024262 service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Received event network-vif-deleted-1053461c-995c-4bdc-a58c-52c4b5d4d8a5 {{(pid=61978) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1692.519614] env[61978]: INFO nova.compute.manager [req-1788afc1-ba01-464b-8bf4-04c117bef181 req-6cb4dfd2-85e7-4c01-bb94-7f64d2024262 service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Neutron deleted interface 1053461c-995c-4bdc-a58c-52c4b5d4d8a5; detaching it from the instance and deleting it from the info cache [ 1692.519614] env[61978]: DEBUG nova.network.neutron [req-1788afc1-ba01-464b-8bf4-04c117bef181 req-6cb4dfd2-85e7-4c01-bb94-7f64d2024262 service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1692.753420] env[61978]: DEBUG nova.scheduler.client.report [None req-36bf3485-eb82-44b9-b633-2c107815462b 
tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Updated inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf with generation 183 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1692.753700] env[61978]: DEBUG nova.compute.provider_tree [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Updating resource provider 44209228-3464-48ae-bc40-83eccd44b0cf generation from 183 to 184 during operation: update_inventory {{(pid=61978) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1692.753891] env[61978]: DEBUG nova.compute.provider_tree [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1692.997769] env[61978]: DEBUG nova.network.neutron [-] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Updating instance_info_cache with network_info: [] {{(pid=61978) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1693.021638] env[61978]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c2520357-e2c2-4aec-8042-43e84f3d4b07 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.031354] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f54869-25db-440f-b24b-fac79abe32c9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.056423] env[61978]: DEBUG nova.compute.manager [req-1788afc1-ba01-464b-8bf4-04c117bef181 req-6cb4dfd2-85e7-4c01-bb94-7f64d2024262 service nova] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Detach interface failed, port_id=1053461c-995c-4bdc-a58c-52c4b5d4d8a5, reason: Instance a48d5ef1-b66b-429e-bbff-2351ad5eda32 could not be found. 
{{(pid=61978) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1693.259017] env[61978]: DEBUG oslo_concurrency.lockutils [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.639s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.282286] env[61978]: INFO nova.scheduler.client.report [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Deleted allocations for instance a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e [ 1693.501058] env[61978]: INFO nova.compute.manager [-] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Took 1.44 seconds to deallocate network for instance. [ 1693.791266] env[61978]: DEBUG oslo_concurrency.lockutils [None req-36bf3485-eb82-44b9-b633-2c107815462b tempest-AttachVolumeNegativeTest-1755418241 tempest-AttachVolumeNegativeTest-1755418241-project-member] Lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.218s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.792076] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.793972] env[61978]: INFO nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] During sync_power_state the instance has a pending task (deleting). Skip. 
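The two terminate_instance flows above follow the same shape: acquire the per-instance lock, power off the VM via a vCenter task (PowerOffVM_Task), unregister it, delete its datastore contents via another task (DeleteDatastoreFile_Task), deallocate the Neutron ports, then release the lock and report updated usage to Placement. Each vCenter task is polled until it completes, which is what the repeated "Task: {'id': task-1396305, ...} progress is 0%" followed by "completed successfully" lines show. Below is a minimal, self-contained sketch of that poll-until-done loop; the FakeTask and poll_task names are hypothetical illustrations, not the oslo.vmware implementation, which drives real vCenter tasks through a VMwareAPISession.

    # Illustrative sketch only: a poll-until-complete loop in the spirit of the
    # wait_for_task/_poll_task pattern seen in the log (task-1396305 PowerOffVM_Task,
    # task-1396307 DeleteDatastoreFile_Task). FakeTask and poll_task are invented
    # names for this example.
    import time


    class FakeTask:
        """Stand-in for a vCenter task handle such as "task-1396305"."""

        def __init__(self, name, ticks_until_done=3):
            self.name = name
            self._ticks = ticks_until_done

        def poll(self):
            """Return (state, progress); completes after a few polls."""
            self._ticks -= 1
            if self._ticks <= 0:
                return "success", 100
            return "running", 0


    def poll_task(task, interval=0.5, timeout=30.0):
        """Poll a task until it succeeds, mirroring the DEBUG lines
        'Task ... progress is 0%' followed by 'completed successfully'."""
        deadline = time.monotonic() + timeout
        while True:
            state, progress = task.poll()
            print(f"Task {task.name}: state={state} progress={progress}%")
            if state == "success":
                return
            if state == "error" or time.monotonic() >= deadline:
                raise RuntimeError(f"Task {task.name} failed or timed out")
            time.sleep(interval)


    if __name__ == "__main__":
        poll_task(FakeTask("task-1396305"))

In the real service this polling happens while the per-instance and "compute_resources" locks reported by oslo_concurrency.lockutils are held, which is why the "acquired ... waited" and "released ... held" durations in the surrounding lines bracket the whole destroy sequence.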
[ 1693.793972] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1694.007592] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1694.007592] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1694.007818] env[61978]: DEBUG nova.objects.instance [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lazy-loading 'resources' on Instance uuid a48d5ef1-b66b-429e-bbff-2351ad5eda32 {{(pid=61978) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1694.538725] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e06af5-777d-40ae-90c2-12e6a3cf75da {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.546514] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1447678f-d87a-4870-a1d6-327289cfec08 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.575173] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d145c1-76ae-4623-9c15-2335150bcfaa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.582023] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a72856-909f-4d79-aef8-b31892a84c0e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.594936] env[61978]: DEBUG nova.compute.provider_tree [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1695.098576] env[61978]: DEBUG nova.scheduler.client.report [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1695.605585] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.597s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.620818] env[61978]: INFO nova.scheduler.client.report [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Deleted allocations for instance a48d5ef1-b66b-429e-bbff-2351ad5eda32 [ 1696.129650] env[61978]: DEBUG oslo_concurrency.lockutils [None req-ff7dc8a8-55c5-4d93-92f7-84b6a020e182 tempest-AttachVolumeShelveTestJSON-900534277 tempest-AttachVolumeShelveTestJSON-900534277-project-member] Lock "a48d5ef1-b66b-429e-bbff-2351ad5eda32" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.179s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.074111] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1724.074492] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1728.556595] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1729.556087] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1729.556439] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1729.556488] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1730.556377] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1730.556737] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1730.556737] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1731.060201] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1731.060445] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.060602] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.060755] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1731.061713] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871d9a9c-5055-46bc-87af-11e75156280f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.069544] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe47b0c1-a9c6-4191-97d9-aa266ae8e509 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.082885] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e84f79-b5e6-4351-8ff3-c204fba0f2dd {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.089069] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d6c0eb2-fe1b-46a9-8f81-99df8c06b909 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.117962] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181228MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1731.118105] env[61978]: DEBUG oslo_concurrency.lockutils [None 
req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1731.118291] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1732.264411] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1732.264763] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1732.277882] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a5cbce0-8d21-4cfc-aa3e-91541f42757e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.285226] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab274960-3381-4ce6-885e-55daf4ed9e89 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.313482] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1abfd4c8-3c68-4f07-9bae-b24d77684b08 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.319925] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee53cd56-b385-4292-aee1-4e57fae6c571 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.332208] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1732.835187] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1733.340277] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 
{{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1733.340658] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.222s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.341133] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1735.341579] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1735.844206] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1784.055672] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1785.556925] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1788.557221] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1789.556394] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1789.556621] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1790.556485] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1790.556888] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1791.556628] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1791.556975] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1792.059950] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1792.060206] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1792.060418] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1792.060583] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1792.061530] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a08ce7-dfc4-4430-8c7c-fc384375ac20 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.069853] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fcb6d35-bc5a-496d-920f-80c5e606f2f7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.083316] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-969d39e2-7e84-4065-a28c-62ae432b3c7d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.089211] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0ca753-e37a-495d-9f1b-18bd0e5bb294 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.117502] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181483MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1792.117651] env[61978]: DEBUG 
oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1792.117827] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.136017] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1793.136272] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1793.149357] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c3fb78-0669-4bd8-9825-9303f5a8d034 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.157432] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3b6fe4-0322-45c0-8314-4dd65ee2fdf4 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.187485] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae1faad-4091-4f41-9bfb-635445b69df6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.194101] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e44c0d-b68a-45c5-88ac-e2406748fe63 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.206279] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1793.708968] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1793.710226] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1793.710444] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.710419] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1795.710741] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1795.710741] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1796.213582] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1797.055167] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1846.056420] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1847.557379] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1849.557639] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1851.558447] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1851.558843] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1851.558893] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1852.556581] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1852.556788] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1853.556732] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1854.060321] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.060650] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1854.060800] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1854.060911] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1854.061835] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-555a446e-2f81-443b-9c61-27ef88a6e71d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.070220] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f73c132-34f9-4360-a522-2d72c80ced58 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.083879] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fb200d-6441-47b2-94be-eb424160c855 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.089862] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5780621-e0e7-4c0d-809e-71395f5bc3c2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.117867] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181513MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1854.118025] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.118195] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.136079] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1855.136361] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1855.149313] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc2642bb-d481-49cc-87a3-d109a61e98cf {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.156545] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2eeadee-c145-4fe0-a8a1-9c3b4afc2e5c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.185269] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be7bb657-1c1a-4d73-9d4e-090885fe4a11 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.191797] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e399f7e-6cc2-4443-b4dd-14ff9ddd1deb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.204025] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1855.706538] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1855.707790] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1855.707973] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.590s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.708255] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1857.708530] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1857.708857] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1858.212063] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1907.557283] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1907.557734] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1910.556901] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1911.557697] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1911.558089] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1913.557671] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1914.556588] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1914.556827] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1915.556634] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1916.059484] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.059726] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.059883] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.060052] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1916.060980] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7032b09b-ae84-4519-8b6e-5f437fff2dc1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.068970] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843644ce-6bc0-48db-8cd0-11b40b6fd46f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.082229] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f69e1dd-2b1e-4e15-b53e-64100381f175 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.087996] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76b3742-e111-4181-9d40-fb7e394a245e {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.802948] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181521MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1916.803362] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.803362] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.823915] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1917.823915] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1917.836667] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b605d788-36f1-4387-a5ce-1ddceab93a70 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.843797] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bdeb0e3-525c-4648-a559-ef2946b148d1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.871447] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1be0e42f-f4ab-44bb-953f-ea408d23df65 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.877950] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-753ac313-5a7f-4ffd-87fa-dcc0c3ec03dc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.890176] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1918.393753] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1918.395306] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1918.395557] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.396241] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1919.900272] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1919.900457] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1919.900574] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1920.403519] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. 
{{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1965.558073] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1965.558616] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Cleaning up deleted instances with incomplete migration {{(pid=61978) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 1968.060209] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1968.551644] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1971.558025] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1972.557672] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1972.557672] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1974.558058] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1974.558058] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1974.558058] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Cleaning up deleted instances {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1975.061423] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] There are 2 instances to clean {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 1975.061691] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: a4a9fe67-1fe6-4b10-a898-6a9113fbaa5e] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1975.564909] env[61978]: DEBUG nova.compute.manager [None 
req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] [instance: a48d5ef1-b66b-429e-bbff-2351ad5eda32] Instance has had 0 of 5 cleanup attempts {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1977.067328] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1977.067787] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1977.067787] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1977.571191] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.571432] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1977.571651] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1977.571810] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1977.572775] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b026936a-295e-4b90-887c-7ac8eead9107 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.580838] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6535e7a-bd78-4b40-a98c-87abe487f9b3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.594753] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c3f12c6-a483-41e5-afc4-b14f40f8e508 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.600559] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03c4399-4361-4d97-81b6-4d67b2cb76b0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.627500] env[61978]: 
DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181521MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1977.627640] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.627824] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1978.648380] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1978.648380] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1978.664134] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Refreshing inventories for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1978.678235] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Updating ProviderTree inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1978.678410] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1978.687846] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Refreshing aggregate associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, 
aggregates: None {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1978.704382] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Refreshing trait associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1978.715198] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89734754-15c3-4f9d-acda-3a2d3b60e0d5 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.722079] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024a0014-d253-46c4-a4fb-ff5efd51ceff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.751857] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42d8c778-9c9f-4f00-a61b-7fb99b9ee0e3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.758422] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9061c777-d438-42e7-810d-1061abb7bd53 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.770480] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1979.273862] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1979.275106] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1979.275287] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.647s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1979.764974] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1979.765261] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1979.765319] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1980.268734] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1985.556943] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2028.060610] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2029.552496] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2032.556372] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2033.556508] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2034.557640] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2034.557640] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2036.557055] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2037.059542] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2037.059805] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2037.060215] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.060533] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2037.061965] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c27a91e5-cfd0-4fa7-bc46-4a8e982756d2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.070408] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b79a6e-921d-4b53-9eff-6835f4a3a698 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.084734] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d582a818-1d97-445b-b561-fd743d45dbfa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.090811] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-449330b1-3d85-41a8-bc5c-51c4e8561a62 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.118267] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181515MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2037.118586] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2037.119049] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2038.192097] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2038.192357] env[61978]: DEBUG nova.compute.resource_tracker [None 
req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2038.205702] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b436f6eb-1d68-48a0-b787-e228b29449b9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.212818] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab67965e-a71d-4a3f-a998-9c8bef797f53 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.241867] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3c094d-6e84-441e-bd6c-fe7d431cbaff {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.249075] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62380f8f-2157-4f42-bc53-646b3678e900 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.261187] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2038.764391] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2038.765805] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2038.766032] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.647s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2040.761662] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2041.269979] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2041.269979] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 2041.269979] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2041.773418] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 2041.773819] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2041.773819] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 2089.557987] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2090.552781] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2093.556231] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2094.556992] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2094.557408] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2096.556855] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2096.557237] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2097.060325] env[61978]: DEBUG oslo_concurrency.lockutils [None 
req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2097.060695] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2097.060904] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2097.061080] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2097.062009] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5306031-b414-4b6b-8635-1ff7375174a1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.070530] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e72858-758c-4570-9555-7f7b19951002 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.084835] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87b5a1b-1ba1-4d64-83f0-85d001aa8f57 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.090781] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f08b459-7af9-4dcb-bb64-055966d76eb6 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.117720] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181523MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2097.117863] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2097.118044] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2098.138888] env[61978]: DEBUG nova.compute.resource_tracker [None 
req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2098.139203] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2098.151244] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11d6b018-6510-4c0b-a16e-b70255246349 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.159179] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f58f9f-d79c-407c-addd-c1022be0c5a1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.188316] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2171597f-c377-4685-8128-dff59a9b02a2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.194785] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db87e334-95bf-48e1-8184-8e4da62a5f55 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.207177] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2098.713720] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2098.715013] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2098.715197] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.597s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2100.714640] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2100.715051] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 2101.556672] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2101.556884] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 2101.556969] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2102.063368] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 2151.557794] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2152.551630] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2154.557971] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2154.558393] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2154.558393] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2156.556151] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2156.556540] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2157.059502] env[61978]: DEBUG oslo_concurrency.lockutils [None 
req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2157.059797] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2157.059932] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2157.060101] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2157.061027] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0536ab3-f415-422f-9582-b1d4fb479fd7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.068962] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e7d199-ce4f-4caf-8748-b3abdc459ab7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.084248] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b82955de-8f88-4e6b-af08-46e02a905a8a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.091321] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dce70a51-45da-4ea4-a24d-d01096e32026 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.118456] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181529MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2157.118609] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2157.118778] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2158.137298] env[61978]: DEBUG nova.compute.resource_tracker [None 
req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2158.137571] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2158.150179] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ce5bba-f765-4c9d-9fff-0fc52d0e0b31 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.157639] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceae2317-c4c9-4bf1-a0aa-6fa166e1ee04 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.187238] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f833afce-2fb6-4218-b956-e2eb4cbc2e72 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.193895] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9800348a-6eb8-4de9-916b-af92c698ff59 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.206397] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2158.709556] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2158.710807] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2158.710991] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2160.706499] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61978) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2161.210973] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2161.211195] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 2161.557123] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2161.557366] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 2161.557366] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2162.060154] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 2211.559806] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2212.551659] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2214.557971] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2215.557663] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2215.557974] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2216.556714] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2216.557180] env[61978]: DEBUG oslo_service.periodic_task [None 
req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2217.060700] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2217.061036] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2217.061120] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2217.061265] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2217.062178] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e0a600-48c3-4bae-a456-594f514c1979 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.070432] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad0190e-7630-46ba-b4bb-60ad5f2f5f34 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.084304] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1340505f-a296-445e-bc46-eb2ac9417328 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.090146] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d241308-34bc-4b4a-944f-60c311c1ce3b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.117135] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181508MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2217.117292] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2217.117447] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2218.135049] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2218.135304] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2218.147670] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb39997-1b28-4e90-bb35-68693df7c7d9 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.155316] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791ac53c-3ff2-4c81-92e9-51c4080daaeb {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.184294] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3c2b48-fe03-40fe-bcc9-2385cc9add75 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.190960] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36bc9b9c-f015-4fe9-b833-c1f62d1cd3df {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.203455] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2218.706880] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2218.708165] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2218.708349] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.591s {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2223.708259] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2223.708678] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 2223.708678] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2224.212146] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 2224.212394] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2224.212524] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 2263.643106] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2263.643585] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Getting list of instances from cluster (obj){ [ 2263.643585] env[61978]: value = "domain-c8" [ 2263.643585] env[61978]: _type = "ClusterComputeResource" [ 2263.643585] env[61978]: } {{(pid=61978) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 2263.644883] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00cbf2f4-ed43-4974-a5b4-452194dac2be {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.653946] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Got total of 0 instances {{(pid=61978) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 2269.556665] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2269.557249] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Cleaning up deleted instances with incomplete migration {{(pid=61978) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 2272.058848] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task 
ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2273.552145] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2274.557641] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2275.128969] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._sync_power_states {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2275.632296] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Getting list of instances from cluster (obj){ [ 2275.632296] env[61978]: value = "domain-c8" [ 2275.632296] env[61978]: _type = "ClusterComputeResource" [ 2275.632296] env[61978]: } {{(pid=61978) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 2275.633386] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e422d0e-c01d-4b7d-90ee-bf179b9017af {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.641999] env[61978]: DEBUG nova.virt.vmwareapi.vmops [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Got total of 0 instances {{(pid=61978) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 2275.642246] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2275.642386] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Cleaning up deleted instances {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 2276.145488] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] There are 0 instances to clean {{(pid=61978) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 2276.573629] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2276.573956] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2277.557343] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2278.557974] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2279.060288] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2279.060535] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2279.060706] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2279.060859] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2279.061770] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817caa30-3cd9-497f-8e9c-eda2e8969726 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.069782] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19fcaceb-9d2b-48f2-803e-8a776be8ef75 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.083211] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf1acf1-b221-48a5-9566-0e4dca56a5aa {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.089416] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff050d07-74ce-42cb-8505-00c564e9038f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.116694] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181516MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2279.116828] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
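
The entries above alternate between oslo.service periodic tasks firing on the compute manager and oslo.concurrency lock bookkeeping around the resource tracker's critical sections. A minimal sketch of those two library patterns, with illustrative class and option names (this is not Nova's actual code):

from oslo_concurrency import lockutils
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF
CONF.register_opts([cfg.IntOpt('reclaim_instance_interval', default=0)])


class DemoComputeManager(periodic_task.PeriodicTasks):
    """Illustrative stand-in for the manager emitting the log lines above."""

    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60)
    def _reclaim_queued_deletes(self, context):
        # Matches the "CONF.reclaim_instance_interval <= 0, skipping..." entries:
        # the task fires on schedule but returns early while the option is unset.
        if CONF.reclaim_instance_interval <= 0:
            return

    @lockutils.synchronized('compute_resources')
    def update_available_resource(self, context):
        # lockutils emits the "Acquiring lock" / "acquired" / "released" DEBUG
        # lines seen above around this critical section.
        pass

Tasks decorated this way only run when something invokes run_periodic_tasks() on the manager, which is what produces the roughly 60-second cadence of "Running periodic task ..." lines in this log.
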
2279.117024] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2280.135589] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2280.135856] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2280.151174] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Refreshing inventories for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2280.164650] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Updating ProviderTree inventory for provider 44209228-3464-48ae-bc40-83eccd44b0cf from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2280.164835] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Updating inventory in ProviderTree for provider 44209228-3464-48ae-bc40-83eccd44b0cf with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2280.174420] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Refreshing aggregate associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, aggregates: None {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2280.188692] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Refreshing trait associations for resource provider 44209228-3464-48ae-bc40-83eccd44b0cf, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61978) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2280.199346] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7aa4115f-3132-4e12-af9d-bb4094c0b60d {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.206564] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b1e60ef-4492-415e-84b4-096b88c124ca {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.234565] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8518420b-892a-4f6c-89fb-f1daa2c30948 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.240980] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8307078-a557-4e2e-a123-218bc5134bba {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.253013] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2280.756058] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2280.757359] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2280.757561] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.641s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2281.752182] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2283.556272] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2283.556757] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 2283.556757] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] 
Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2284.059856] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 2284.060110] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2284.060254] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 2292.558054] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2333.060951] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2335.551828] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2336.557286] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2336.557773] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2336.557976] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2338.556497] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2338.556939] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2339.062110] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2339.062370] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2339.062541] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2339.062710] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2339.064007] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3b05ae-2dca-4b1d-a1b4-3441c5fa9862 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2339.071942] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aafa9b71-b10a-49e3-b221-88dc94eafbec {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2339.086078] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ecfc70-49c7-4558-a452-c5b44b801674 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2339.092383] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b4fd0d3-9ecd-4570-8e86-3ea5e6215825 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2339.120292] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181522MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2339.120427] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2339.120614] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2340.275417] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61978) 
_report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2340.275689] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2340.288906] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f79cf9d-b83c-422e-9ce4-11744f312d80 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.296426] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c807a99d-0282-45f8-8a16-74084e286ad3 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.326395] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f45176e-b0f8-4063-a930-59c7e364ddd0 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.333161] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860b3176-8562-48c0-b6af-4e1de2eb09bc {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.345429] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2340.849232] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2340.850375] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2340.850560] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.730s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2347.851658] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2347.852133] env[61978]: DEBUG nova.compute.manager [None 
req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 2347.852133] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2348.355282] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 2348.355575] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2348.355739] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 2392.557440] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2395.552587] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2397.557056] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2397.557497] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2397.557497] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2398.556656] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2399.060752] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2399.061144] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2399.061201] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2399.061354] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2399.062374] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26f40fb-9271-43f3-a068-e756b79a5020 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2399.070590] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5dcdbb9-653a-4d72-9546-ee8e90080cf1 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2399.084264] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee2dcae-b965-43f1-afb5-278bb293a69a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2399.090537] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46696015-837b-42dc-b893-13e102a30e2a {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2399.117648] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181529MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2399.117789] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2399.117975] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2400.136626] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2400.136887] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB 
total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2400.149288] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc2891f6-7c67-4f4e-8fbf-4bc50fc35ea2 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.156860] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c006eeb-7464-4ef4-800e-0c5a5ae23298 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.185784] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74cc3fd9-b2e1-4970-84d9-4f0bd6fea160 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.192450] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1b9865-4cb1-4e86-b06d-65b12edb8d4c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2400.204615] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2400.707807] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2400.709130] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2400.709333] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.591s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2402.709565] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2403.213519] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2405.556957] env[61978]: DEBUG oslo_service.periodic_task [None 
req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2405.557375] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 2406.558155] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2406.558557] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 2406.558557] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2407.061528] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 2454.558100] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2457.552194] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2457.555805] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2458.557084] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2458.557444] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2459.556400] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager.update_available_resource {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2460.060220] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61978) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2460.060582] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2460.060630] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2460.060775] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61978) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2460.061750] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad7b193-2fdb-402e-bd4f-ac838f758d2b {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.069908] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab95380-0beb-40a3-98ca-923b04f55d93 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.083283] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4911e57d-19ff-477e-9978-ceb12bd3863c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.089077] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8b678f-1a79-480e-b7fc-fbfcbf913d4c {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.116457] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181529MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=61978) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2460.116597] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2460.116785] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2461.136435] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2461.136674] 
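
Each audit above ends with the resource tracker reporting the same inventory to placement. As a quick illustration of what those numbers mean to the scheduler, usable capacity per resource class follows the standard placement formula capacity = (total - reserved) * allocation_ratio, with max_unit capping any single allocation. The values below are copied from the log entries; the helper itself is only a sketch:

inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                  'max_unit': 65530},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0,
                'max_unit': 186},
}


def capacity(inv):
    """Total amount of each resource class the scheduler can place on this node."""
    return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
            for rc, v in inv.items()}


print(capacity(inventory))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
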
env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61978) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2461.149600] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf9dda7a-b6f7-4a9d-a057-0d94ff3f9625 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.156993] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1348038d-d4f3-45e4-a942-e87d7dff4008 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.184944] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717da8c8-d3f7-4878-acb2-405b095f7fa7 {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.191784] env[61978]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-864e9b9c-8349-47af-a124-c8337154f07f {{(pid=61978) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.204040] env[61978]: DEBUG nova.compute.provider_tree [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed in ProviderTree for provider: 44209228-3464-48ae-bc40-83eccd44b0cf {{(pid=61978) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2461.706797] env[61978]: DEBUG nova.scheduler.client.report [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Inventory has not changed for provider 44209228-3464-48ae-bc40-83eccd44b0cf based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61978) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2461.708124] env[61978]: DEBUG nova.compute.resource_tracker [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61978) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2461.708315] env[61978]: DEBUG oslo_concurrency.lockutils [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=61978) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2462.709929] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2465.556126] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task 
ComputeManager._reclaim_queued_deletes {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2465.556500] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61978) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 2466.558164] env[61978]: DEBUG oslo_service.periodic_task [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61978) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2466.559176] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Starting heal instance info cache {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 2466.559176] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Rebuilding the list of instances to heal {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2467.061707] env[61978]: DEBUG nova.compute.manager [None req-464d5d11-e5f5-494c-968c-19d13a87a4ef None None] Didn't find any instances for network info cache update. {{(pid=61978) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}}